/*
 * Poly1305 authenticator algorithm, RFC7539
 *
 * Copyright (C) 2015 Martin Willi
 *
 * Based on public domain code by Andrew Moon and Daniel J. Bernstein.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/poly1305.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/unaligned.h>

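/*
 * Helpers for the 26-bit limb arithmetic below: every h and r limb stays
 * well under 32 bits, so each product fits in a u64 and a sum of five such
 * products cannot overflow the 64-bit accumulators.
 */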
static inline u64 mlt(u64 a, u64 b)
{
	return a * b;
}

static inline u32 sr(u64 v, u_char n)
{
	return v >> n;
}

static inline u32 and(u32 v, u32 mask)
{
	return v & mask;
}

int crypto_poly1305_init(struct shash_desc *desc)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);

	memset(dctx->h, 0, sizeof(dctx->h));
	dctx->buflen = 0;
	dctx->rset = false;
	dctx->sset = false;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_init);

int crypto_poly1305_setkey(struct crypto_shash *tfm,
			   const u8 *key, unsigned int keylen)
{
	/* Poly1305 requires a unique key for each tag, which implies that
	 * we can't set it on the tfm that gets accessed by multiple users
	 * simultaneously. Instead we expect the key as the first 32 bytes in
	 * the update() call. */
	return -ENOTSUPP;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_setkey);

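/*
 * Load the first 16 key bytes as the multiplier r, split into five 26-bit
 * limbs, with the RFC 7539 clamping applied while loading: the top four
 * bits of key bytes 3, 7, 11 and 15 and the bottom two bits of bytes 4, 8
 * and 12 are cleared.
 */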
static void poly1305_setrkey(struct poly1305_desc_ctx *dctx, const u8 *key)
{
	/* r &= 0x0ffffffc0ffffffc0ffffffc0fffffff */
	dctx->r[0] = (get_unaligned_le32(key +  0) >> 0) & 0x3ffffff;
	dctx->r[1] = (get_unaligned_le32(key +  3) >> 2) & 0x3ffff03;
	dctx->r[2] = (get_unaligned_le32(key +  6) >> 4) & 0x3ffc0ff;
	dctx->r[3] = (get_unaligned_le32(key +  9) >> 6) & 0x3f03fff;
	dctx->r[4] = (get_unaligned_le32(key + 12) >> 8) & 0x00fffff;
}

static void poly1305_setskey(struct poly1305_desc_ctx *dctx, const u8 *key)
{
	dctx->s[0] = get_unaligned_le32(key +  0);
	dctx->s[1] = get_unaligned_le32(key +  4);
	dctx->s[2] = get_unaligned_le32(key +  8);
	dctx->s[3] = get_unaligned_le32(key + 12);
}

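/*
 * Consume the one-time key from the start of the data stream: the first
 * 16 bytes set r, the next 16 bytes set s.  Returns the number of bytes
 * of actual message data remaining in src.
 */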
unsigned int crypto_poly1305_setdesckey(struct poly1305_desc_ctx *dctx,
					const u8 *src, unsigned int srclen)
{
	if (!dctx->sset) {
		if (!dctx->rset && srclen >= POLY1305_BLOCK_SIZE) {
			poly1305_setrkey(dctx, src);
			src += POLY1305_BLOCK_SIZE;
			srclen -= POLY1305_BLOCK_SIZE;
			dctx->rset = true;
		}
		if (srclen >= POLY1305_BLOCK_SIZE) {
			poly1305_setskey(dctx, src);
			src += POLY1305_BLOCK_SIZE;
			srclen -= POLY1305_BLOCK_SIZE;
			dctx->sset = true;
		}
	}
	return srclen;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_setdesckey);

static unsigned int poly1305_blocks(struct poly1305_desc_ctx *dctx,
				    const u8 *src, unsigned int srclen,
				    u32 hibit)
{
	u32 r0, r1, r2, r3, r4;
	u32 s1, s2, s3, s4;
	u32 h0, h1, h2, h3, h4;
	u64 d0, d1, d2, d3, d4;
	unsigned int datalen;

	if (unlikely(!dctx->sset)) {
		datalen = crypto_poly1305_setdesckey(dctx, src, srclen);
		src += srclen - datalen;
		srclen = datalen;
	}

	r0 = dctx->r[0];
	r1 = dctx->r[1];
	r2 = dctx->r[2];
	r3 = dctx->r[3];
	r4 = dctx->r[4];

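	/*
	 * Precompute s_i = 5 * r_i: since 2^130 == 5 (mod 2^130 - 5), limb
	 * products that land above bit 130 can be folded back in by
	 * multiplying with 5 instead of doing a separate reduction step.
	 */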
	s1 = r1 * 5;
	s2 = r2 * 5;
	s3 = r3 * 5;
	s4 = r4 * 5;

	h0 = dctx->h[0];
	h1 = dctx->h[1];
	h2 = dctx->h[2];
	h3 = dctx->h[3];
	h4 = dctx->h[4];

	while (likely(srclen >= POLY1305_BLOCK_SIZE)) {

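		/*
		 * Split the 16-byte block into five 26-bit limbs and add it
		 * to the accumulator.  For full blocks hibit is 1 << 24,
		 * which sets bit 128 of the padded block (the 2^128 pad bit
		 * from RFC 7539); the final partial block passes hibit = 0
		 * because its pad byte is appended by the caller instead.
		 */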
		/* h += m[i] */
		h0 += (get_unaligned_le32(src +  0) >> 0) & 0x3ffffff;
		h1 += (get_unaligned_le32(src +  3) >> 2) & 0x3ffffff;
		h2 += (get_unaligned_le32(src +  6) >> 4) & 0x3ffffff;
		h3 += (get_unaligned_le32(src +  9) >> 6) & 0x3ffffff;
		h4 += (get_unaligned_le32(src + 12) >> 8) | hibit;

		/* h *= r */
		d0 = mlt(h0, r0) + mlt(h1, s4) + mlt(h2, s3) +
		     mlt(h3, s2) + mlt(h4, s1);
		d1 = mlt(h0, r1) + mlt(h1, r0) + mlt(h2, s4) +
		     mlt(h3, s3) + mlt(h4, s2);
		d2 = mlt(h0, r2) + mlt(h1, r1) + mlt(h2, r0) +
		     mlt(h3, s4) + mlt(h4, s3);
		d3 = mlt(h0, r3) + mlt(h1, r2) + mlt(h2, r1) +
		     mlt(h3, r0) + mlt(h4, s4);
		d4 = mlt(h0, r4) + mlt(h1, r3) + mlt(h2, r2) +
		     mlt(h3, r1) + mlt(h4, r0);

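		/*
		 * Propagate carries 26 bits at a time; the carry out of the
		 * top limb re-enters at the bottom multiplied by 5 (again
		 * because 2^130 == 5 mod p).  h is only partially reduced
		 * here, which is sufficient between blocks.
		 */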
		/* (partial) h %= p */
		d1 += sr(d0, 26);     h0 = and(d0, 0x3ffffff);
		d2 += sr(d1, 26);     h1 = and(d1, 0x3ffffff);
		d3 += sr(d2, 26);     h2 = and(d2, 0x3ffffff);
		d4 += sr(d3, 26);     h3 = and(d3, 0x3ffffff);
		h0 += sr(d4, 26) * 5; h4 = and(d4, 0x3ffffff);
		h1 += h0 >> 26;       h0 = h0 & 0x3ffffff;

		src += POLY1305_BLOCK_SIZE;
		srclen -= POLY1305_BLOCK_SIZE;
	}

	dctx->h[0] = h0;
	dctx->h[1] = h1;
	dctx->h[2] = h2;
	dctx->h[3] = h3;
	dctx->h[4] = h4;

	return srclen;
}

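/*
 * Buffer input until a full 16-byte block is available, then run the block
 * function with the 2^128 pad bit set (hibit = 1 << 24 in the top limb).
 * Any trailing partial block is kept in dctx->buf for the next update()
 * call or for final().
 */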
int crypto_poly1305_update(struct shash_desc *desc,
			   const u8 *src, unsigned int srclen)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
	unsigned int bytes;

	if (unlikely(dctx->buflen)) {
		bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
		memcpy(dctx->buf + dctx->buflen, src, bytes);
		src += bytes;
		srclen -= bytes;
		dctx->buflen += bytes;

		if (dctx->buflen == POLY1305_BLOCK_SIZE) {
			poly1305_blocks(dctx, dctx->buf,
					POLY1305_BLOCK_SIZE, 1 << 24);
			dctx->buflen = 0;
		}
	}

	if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
		bytes = poly1305_blocks(dctx, src, srclen, 1 << 24);
		src += srclen - bytes;
		srclen = bytes;
	}

	if (unlikely(srclen)) {
		dctx->buflen = srclen;
		memcpy(dctx->buf, src, srclen);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_update);

int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
	__le32 *mac = (__le32 *)dst;
	u32 h0, h1, h2, h3, h4;
	u32 g0, g1, g2, g3, g4;
	u32 mask;
	u64 f = 0;

	if (unlikely(!dctx->sset))
		return -ENOKEY;

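	/*
	 * Pad a trailing partial block with a single 1 byte followed by
	 * zeros and process it with hibit = 0: the explicit pad byte takes
	 * the place of the 2^128 bit that full blocks receive via hibit.
	 */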
	if (unlikely(dctx->buflen)) {
		dctx->buf[dctx->buflen++] = 1;
		memset(dctx->buf + dctx->buflen, 0,
		       POLY1305_BLOCK_SIZE - dctx->buflen);
		poly1305_blocks(dctx, dctx->buf, POLY1305_BLOCK_SIZE, 0);
	}

	/* fully carry h */
	h0 = dctx->h[0];
	h1 = dctx->h[1];
	h2 = dctx->h[2];
	h3 = dctx->h[3];
	h4 = dctx->h[4];

	h2 += (h1 >> 26);     h1 = h1 & 0x3ffffff;
	h3 += (h2 >> 26);     h2 = h2 & 0x3ffffff;
	h4 += (h3 >> 26);     h3 = h3 & 0x3ffffff;
	h0 += (h4 >> 26) * 5; h4 = h4 & 0x3ffffff;
	h1 += (h0 >> 26);     h0 = h0 & 0x3ffffff;

	/* compute h + -p */
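	/*
	 * p = 2^130 - 5, so h - p is evaluated as h + 5 - 2^130, with the
	 * 2^130 subtracted as 1 << 26 from the top limb.  Bit 31 of g4 ends
	 * up set exactly when the subtraction borrows, i.e. when h < p.
	 */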
	g0 = h0 + 5;
	g1 = h1 + (g0 >> 26);             g0 &= 0x3ffffff;
	g2 = h2 + (g1 >> 26);             g1 &= 0x3ffffff;
	g3 = h3 + (g2 >> 26);             g2 &= 0x3ffffff;
	g4 = h4 + (g3 >> 26) - (1 << 26); g3 &= 0x3ffffff;

	/* select h if h < p, or h + -p if h >= p */
	mask = (g4 >> ((sizeof(u32) * 8) - 1)) - 1;
	g0 &= mask;
	g1 &= mask;
	g2 &= mask;
	g3 &= mask;
	g4 &= mask;
	mask = ~mask;
	h0 = (h0 & mask) | g0;
	h1 = (h1 & mask) | g1;
	h2 = (h2 & mask) | g2;
	h3 = (h3 & mask) | g3;
	h4 = (h4 & mask) | g4;

	/* h = h % (2^128) */
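	/*
	 * Repack the five 26-bit limbs into four 32-bit little-endian words,
	 * dropping bits 128 and 129 of the fully reduced accumulator.
	 */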
	h0 = (h0 >>  0) | (h1 << 26);
	h1 = (h1 >>  6) | (h2 << 20);
	h2 = (h2 >> 12) | (h3 << 14);
	h3 = (h3 >> 18) | (h4 <<  8);

	/* mac = (h + s) % (2^128) */
	f = (f >> 32) + h0 + dctx->s[0]; mac[0] = cpu_to_le32(f);
	f = (f >> 32) + h1 + dctx->s[1]; mac[1] = cpu_to_le32(f);
	f = (f >> 32) + h2 + dctx->s[2]; mac[2] = cpu_to_le32(f);
	f = (f >> 32) + h3 + dctx->s[3]; mac[3] = cpu_to_le32(f);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_poly1305_final);

static struct shash_alg poly1305_alg = {
	.digestsize	= POLY1305_DIGEST_SIZE,
	.init		= crypto_poly1305_init,
	.update		= crypto_poly1305_update,
	.final		= crypto_poly1305_final,
	.setkey		= crypto_poly1305_setkey,
	.descsize	= sizeof(struct poly1305_desc_ctx),
	.base		= {
		.cra_name		= "poly1305",
		.cra_driver_name	= "poly1305-generic",
		.cra_priority		= 100,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_alignmask		= sizeof(u32) - 1,
		.cra_blocksize		= POLY1305_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	},
};

static int __init poly1305_mod_init(void)
{
	return crypto_register_shash(&poly1305_alg);
}

static void __exit poly1305_mod_exit(void)
{
	crypto_unregister_shash(&poly1305_alg);
}

module_init(poly1305_mod_init);
module_exit(poly1305_mod_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("Poly1305 authenticator");
MODULE_ALIAS_CRYPTO("poly1305");
MODULE_ALIAS_CRYPTO("poly1305-generic");