xref: /openbmc/linux/lib/siphash.c (revision 53c83d6d)
1 // SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause)
2 /* Copyright (C) 2016-2022 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
3  *
4  * SipHash: a fast short-input PRF
5  * https://131002.net/siphash/
6  *
7  * This implementation is specifically for SipHash2-4 for a secure PRF
8  * and HalfSipHash1-3/SipHash1-3 for an insecure PRF only suitable for
9  * hashtables.
10  */
11 
12 #include <linux/siphash.h>
13 #include <asm/unaligned.h>
14 
15 #if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
16 #include <linux/dcache.h>
17 #include <asm/word-at-a-time.h>
18 #endif
19 
/* One round of the SipHash ARX (add-rotate-xor) permutation over the
 * 256-bit state v0..v3.  Multi-statement macro wrapped in do/while (0)
 * so it behaves as a single statement at the call site.
 */
#define SIPROUND \
	do { \
	v0 += v1; v1 = rol64(v1, 13); v1 ^= v0; v0 = rol64(v0, 32); \
	v2 += v3; v3 = rol64(v3, 16); v3 ^= v2; \
	v0 += v3; v3 = rol64(v3, 21); v3 ^= v0; \
	v2 += v1; v1 = rol64(v1, 17); v1 ^= v2; v2 = rol64(v2, 32); \
	} while (0)
27 
/* Initialize the four state words from the SipHash constants
 * ("somepseudorandomlygeneratedbytes" in little-endian ASCII) XORed
 * with the 128-bit key, and place the input length in the top byte of
 * the final block b, as the SipHash padding requires.  Expands to
 * declarations, so it must appear at the top of the function body.
 */
#define PREAMBLE(len) \
	u64 v0 = 0x736f6d6570736575ULL; \
	u64 v1 = 0x646f72616e646f6dULL; \
	u64 v2 = 0x6c7967656e657261ULL; \
	u64 v3 = 0x7465646279746573ULL; \
	u64 b = ((u64)(len)) << 56; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];
38 
/* SipHash-2-4 finalization: compress the final (length/tail) block b
 * with 2 rounds, inject the 0xff finalization constant into v2, run
 * the 4 finalization rounds, and fold the state down to the 64-bit
 * digest.  Expands to a return statement.
 */
#define POSTAMBLE \
	v3 ^= b; \
	SIPROUND; \
	SIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	SIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);
50 
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/* SipHash-2-4 over an 8-byte-aligned buffer.  Only built when the
 * architecture lacks efficient unaligned loads; otherwise the aligned
 * entry point presumably aliases __siphash_unaligned in the header --
 * confirm in <linux/siphash.h>.
 */
u64 __siphash_aligned(const void *data, size_t len, const siphash_key_t *key)
{
	/* end marks the last full 8-byte word; left is the 0-7 byte tail. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	/* Compress each full little-endian 64-bit word: 2 rounds per word. */
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Grab the tail with one word load that may run past the buffer
	 * (safe via load_unaligned_zeropad), then mask to 'left' bytes.
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Piecewise tail read; note data == end at this point. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_aligned);
#endif
84 
/* SipHash-2-4 over an arbitrary byte buffer; tolerates any alignment
 * of @data by using get_unaligned_le64() for the word loads.
 */
u64 __siphash_unaligned(const void *data, size_t len, const siphash_key_t *key)
{
	/* end marks the last full 8-byte word; left is the 0-7 byte tail. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	PREAMBLE(len)
	/* Compress each full little-endian 64-bit word: 2 rounds per word. */
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		SIPROUND;
		SIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Grab the tail with one word load that may run past the buffer
	 * (safe via load_unaligned_zeropad), then mask to 'left' bytes.
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Piecewise tail read; note data == end at this point. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	POSTAMBLE
}
EXPORT_SYMBOL(__siphash_unaligned);
116 
/**
 * siphash_1u64 - compute 64-bit siphash PRF value of a u64
 * @first: first u64
 * @key: the siphash key
 *
 * Return: the SipHash-2-4 digest of the single 8-byte block @first.
 */
u64 siphash_1u64(const u64 first, const siphash_key_t *key)
{
	PREAMBLE(8)
	/* One full message block: 2 compression rounds. */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u64);
132 
/**
 * siphash_2u64 - compute 64-bit siphash PRF value of 2 u64
 * @first: first u64
 * @second: second u64
 * @key: the siphash key
 *
 * Return: the SipHash-2-4 digest of the 16-byte input (@first, @second).
 */
u64 siphash_2u64(const u64 first, const u64 second, const siphash_key_t *key)
{
	PREAMBLE(16)
	/* Two full message blocks: 2 compression rounds each. */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_2u64);
153 
/**
 * siphash_3u64 - compute 64-bit siphash PRF value of 3 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @key: the siphash key
 *
 * Return: the SipHash-2-4 digest of the 24-byte input.
 */
u64 siphash_3u64(const u64 first, const u64 second, const u64 third,
		 const siphash_key_t *key)
{
	PREAMBLE(24)
	/* Three full message blocks: 2 compression rounds each. */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u64);
180 
/**
 * siphash_4u64 - compute 64-bit siphash PRF value of 4 u64
 * @first: first u64
 * @second: second u64
 * @third: third u64
 * @forth: fourth u64
 * @key: the siphash key
 *
 * Return: the SipHash-2-4 digest of the 32-byte input.
 */
u64 siphash_4u64(const u64 first, const u64 second, const u64 third,
		 const u64 forth, const siphash_key_t *key)
{
	PREAMBLE(32)
	/* Four full message blocks: 2 compression rounds each. */
	v3 ^= first;
	SIPROUND;
	SIPROUND;
	v0 ^= first;
	v3 ^= second;
	SIPROUND;
	SIPROUND;
	v0 ^= second;
	v3 ^= third;
	SIPROUND;
	SIPROUND;
	v0 ^= third;
	v3 ^= forth;
	SIPROUND;
	SIPROUND;
	v0 ^= forth;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_4u64);
212 
/**
 * siphash_1u32 - compute 64-bit siphash PRF value of a u32
 * @first: first u32
 * @key: the siphash key
 */
u64 siphash_1u32(const u32 first, const siphash_key_t *key)
{
	PREAMBLE(4)
	/* Fewer than 8 bytes of input: it rides in b next to the length. */
	b |= first;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_1u32);
220 
/**
 * siphash_3u32 - compute 64-bit siphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the siphash key
 */
u64 siphash_3u32(const u32 first, const u32 second, const u32 third,
		 const siphash_key_t *key)
{
	/* Pack the first two words into one 64-bit message block. */
	u64 combined = (u64)second << 32 | first;
	PREAMBLE(12)
	v3 ^= combined;
	SIPROUND;
	SIPROUND;
	v0 ^= combined;
	/* Remaining 4 bytes ride in b next to the length. */
	b |= third;
	POSTAMBLE
}
EXPORT_SYMBOL(siphash_3u32);
234 
#if BITS_PER_LONG == 64
/* Note that on 64-bit, we make HalfSipHash1-3 actually be SipHash1-3, for
 * performance reasons. On 32-bit, below, we actually implement HalfSipHash1-3.
 */

/* SipHash-1-3 reuses the 64-bit round and state setup... */
#define HSIPROUND SIPROUND
#define HPREAMBLE(len) PREAMBLE(len)
/* ...but with only 1 compression round per block and 3 finalization
 * rounds.  Expands to a return statement.
 */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return (v0 ^ v1) ^ (v2 ^ v3);
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/* SipHash-1-3 (hashtable-strength PRF) over an 8-byte-aligned buffer;
 * only built when the architecture lacks efficient unaligned loads.
 * The return type truncates the 64-bit state fold to 32 bits.
 */
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	/* end marks the last full 8-byte word; left is the 0-7 byte tail. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	/* One compression round per full little-endian 64-bit word. */
	for (; data != end; data += sizeof(u64)) {
		m = le64_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Single masked word load for the tail (load_unaligned_zeropad
	 * makes reading past the buffer safe).
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Piecewise tail read; note data == end at this point. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= le32_to_cpup(data); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif
284 
/* SipHash-1-3 (hashtable-strength PRF) over an arbitrary byte buffer;
 * tolerates any alignment of @data.  The return type truncates the
 * 64-bit state fold to 32 bits.
 */
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	/* end marks the last full 8-byte word; left is the 0-7 byte tail. */
	const u8 *end = data + len - (len % sizeof(u64));
	const u8 left = len & (sizeof(u64) - 1);
	u64 m;
	HPREAMBLE(len)
	/* One compression round per full little-endian 64-bit word. */
	for (; data != end; data += sizeof(u64)) {
		m = get_unaligned_le64(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
#if defined(CONFIG_DCACHE_WORD_ACCESS) && BITS_PER_LONG == 64
	/* Single masked word load for the tail (load_unaligned_zeropad
	 * makes reading past the buffer safe).
	 */
	if (left)
		b |= le64_to_cpu((__force __le64)(load_unaligned_zeropad(data) &
						  bytemask_from_count(left)));
#else
	/* Piecewise tail read; note data == end at this point. */
	switch (left) {
	case 7: b |= ((u64)end[6]) << 48; fallthrough;
	case 6: b |= ((u64)end[5]) << 40; fallthrough;
	case 5: b |= ((u64)end[4]) << 32; fallthrough;
	case 4: b |= get_unaligned_le32(end); break;
	case 3: b |= ((u64)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
#endif
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);
316 
/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	/* Fewer than 8 bytes of input: it rides in b next to the length. */
	b |= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);
329 
/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	/* Pack both words into one 64-bit message block (1 round). */
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(8)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);
346 
/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	/* Pack the first two words into one 64-bit message block. */
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(12)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	/* Remaining 4 bytes ride in b next to the length. */
	b |= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);
366 
/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: fourth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	/* Pack the four words into two 64-bit message blocks. */
	u64 combined = (u64)second << 32 | first;
	HPREAMBLE(16)
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	combined = (u64)forth << 32 | third;
	v3 ^= combined;
	HSIPROUND;
	v0 ^= combined;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
#else
/* 32-bit: genuine HalfSipHash-1-3 on a 4x32-bit state. */

/* One round of the HalfSipHash ARX permutation -- the 32-bit analogue
 * of SIPROUND with adjusted rotation constants.
 */
#define HSIPROUND \
	do { \
	v0 += v1; v1 = rol32(v1, 5); v1 ^= v0; v0 = rol32(v0, 16); \
	v2 += v3; v3 = rol32(v3, 8); v3 ^= v2; \
	v0 += v3; v3 = rol32(v3, 7); v3 ^= v0; \
	v2 += v1; v1 = rol32(v1, 13); v1 ^= v2; v2 = rol32(v2, 16); \
	} while (0)

/* Initialize the 128-bit state from the HalfSipHash constants XORed
 * with the 64-bit key, and place the input length in the top byte of
 * the final block b.  Expands to declarations, so it must appear at
 * the top of the function body.
 */
#define HPREAMBLE(len) \
	u32 v0 = 0; \
	u32 v1 = 0; \
	u32 v2 = 0x6c796765U; \
	u32 v3 = 0x74656462U; \
	u32 b = ((u32)(len)) << 24; \
	v3 ^= key->key[1]; \
	v2 ^= key->key[0]; \
	v1 ^= key->key[1]; \
	v0 ^= key->key[0];

/* HalfSipHash-1-3 finalization: 1 compression round for the final
 * block, 3 finalization rounds, 32-bit digest is v1 ^ v3.  Expands to
 * a return statement.
 */
#define HPOSTAMBLE \
	v3 ^= b; \
	HSIPROUND; \
	v0 ^= b; \
	v2 ^= 0xff; \
	HSIPROUND; \
	HSIPROUND; \
	HSIPROUND; \
	return v1 ^ v3;
419 
#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
/* HalfSipHash-1-3 over a 4-byte-aligned buffer; only built when the
 * architecture lacks efficient unaligned loads.
 */
u32 __hsiphash_aligned(const void *data, size_t len, const hsiphash_key_t *key)
{
	/* end marks the last full 4-byte word; left is the 0-3 byte tail. */
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	/* One compression round per full little-endian 32-bit word. */
	for (; data != end; data += sizeof(u32)) {
		m = le32_to_cpup(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	/* Piecewise tail read; note data == end at this point. */
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16; fallthrough;
	case 2: b |= le16_to_cpup(data); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_aligned);
#endif
442 
/* HalfSipHash-1-3 over an arbitrary byte buffer; tolerates any
 * alignment of @data.
 */
u32 __hsiphash_unaligned(const void *data, size_t len,
			 const hsiphash_key_t *key)
{
	/* end marks the last full 4-byte word; left is the 0-3 byte tail. */
	const u8 *end = data + len - (len % sizeof(u32));
	const u8 left = len & (sizeof(u32) - 1);
	u32 m;
	HPREAMBLE(len)
	/* One compression round per full little-endian 32-bit word. */
	for (; data != end; data += sizeof(u32)) {
		m = get_unaligned_le32(data);
		v3 ^= m;
		HSIPROUND;
		v0 ^= m;
	}
	/* Piecewise tail read; note data == end at this point. */
	switch (left) {
	case 3: b |= ((u32)end[2]) << 16; fallthrough;
	case 2: b |= get_unaligned_le16(end); break;
	case 1: b |= end[0];
	}
	HPOSTAMBLE
}
EXPORT_SYMBOL(__hsiphash_unaligned);
464 
/**
 * hsiphash_1u32 - compute 32-bit hsiphash PRF value of a u32
 * @first: first u32
 * @key: the hsiphash key
 */
u32 hsiphash_1u32(const u32 first, const hsiphash_key_t *key)
{
	HPREAMBLE(4)
	/* One full message block: 1 compression round. */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_1u32);
479 
/**
 * hsiphash_2u32 - compute 32-bit hsiphash PRF value of 2 u32
 * @first: first u32
 * @second: second u32
 * @key: the hsiphash key
 */
u32 hsiphash_2u32(const u32 first, const u32 second, const hsiphash_key_t *key)
{
	HPREAMBLE(8)
	/* Two message blocks: 1 compression round each. */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_2u32);
498 
/**
 * hsiphash_3u32 - compute 32-bit hsiphash PRF value of 3 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @key: the hsiphash key
 */
u32 hsiphash_3u32(const u32 first, const u32 second, const u32 third,
		  const hsiphash_key_t *key)
{
	HPREAMBLE(12)
	/* Three message blocks: 1 compression round each. */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_3u32);
522 
/**
 * hsiphash_4u32 - compute 32-bit hsiphash PRF value of 4 u32
 * @first: first u32
 * @second: second u32
 * @third: third u32
 * @forth: fourth u32
 * @key: the hsiphash key
 */
u32 hsiphash_4u32(const u32 first, const u32 second, const u32 third,
		  const u32 forth, const hsiphash_key_t *key)
{
	HPREAMBLE(16)
	/* Four message blocks: 1 compression round each. */
	v3 ^= first;
	HSIPROUND;
	v0 ^= first;
	v3 ^= second;
	HSIPROUND;
	v0 ^= second;
	v3 ^= third;
	HSIPROUND;
	v0 ^= third;
	v3 ^= forth;
	HSIPROUND;
	v0 ^= forth;
	HPOSTAMBLE
}
EXPORT_SYMBOL(hsiphash_4u32);
550 #endif
551