/*
 * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

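/*
 * Core CCM routines implemented in the accompanying assembly
 * (aes-ce-ccm-core.S). They use the NEON/Crypto Extensions register
 * file, so they may only be called between kernel_neon_begin() and
 * kernel_neon_end().
 */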
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

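/*
 * Generic scalar AES block cipher; safe to call without the NEON unit,
 * so it backs the !crypto_simd_usable() fallback paths below.
 */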
asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

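/* Expand the key with the CE-accelerated key schedule helper. */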
static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

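/*
 * CCM requires an even tag length between 4 and 16 bytes; the upper
 * bound is enforced by the AEAD API via .maxauthsize below.
 */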
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

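/*
 * Build the B0 block (flags byte, nonce, message length) that seeds the
 * CBC-MAC, and turn req->iv into the initial counter block A0 by zeroing
 * its length field (RFC 3610 / NIST SP 800-38C).
 */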
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *                (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

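/*
 * Feed @abytes bytes of data into the CBC-MAC. On the scalar fallback
 * path, *macp tracks how far the current MAC block has been filled, so
 * partial blocks can be resumed across calls.
 */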
static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp)
{
	if (crypto_simd_usable()) {
		kernel_neon_begin();
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
		kernel_neon_end();
	} else {
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes >= AES_BLOCK_SIZE) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		}
	}
}

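/*
 * MAC the associated data, prepended with its length encoded as per
 * RFC 3610: two big-endian bytes when assoclen < 0xff00 (e.g. 24 bytes
 * of AAD yields the prefix 0x00 0x18), otherwise the marker 0xff 0xfe
 * followed by a 32-bit big-endian length.
 */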
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

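/*
 * Scalar fallback: interleave CTR en/decryption with the CBC-MAC using
 * the generic AES cipher, for when the NEON unit cannot be used. The
 * final partial block is folded into the last pass once the walk covers
 * the remainder of the request, and the tag is produced by encrypting
 * the preserved initial counter block A0 and XORing it into the MAC.
 */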
static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

		if (nbytes == walk->total && tail > 0) {
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			__aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
					    num_rounds(ctx));
			__aes_arm64_encrypt(ctx->key_enc, mac, mac,
					    num_rounds(ctx));
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);
			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

	if (!err) {
		__aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
		__aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}

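/*
 * Encrypt: MAC the AAD, CTR-encrypt the payload while accumulating the
 * MAC over the plaintext, finalize the tag against the preserved A0
 * block, and append the tag to the destination buffer.
 */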
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

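/*
 * Decrypt: same walk as encryption, but the MAC accumulates over the
 * recovered plaintext, and the computed tag is compared against the
 * trailing tag in constant time via crypto_memneq().
 */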
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

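/*
 * A kernel user would typically reach this implementation through the
 * generic AEAD API. A minimal sketch (error handling and the remaining
 * request setup elided, 16-byte key and tag assumed):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 16);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_crypt(req, src, dst, len, iv);
 *	crypto_aead_encrypt(req);
 */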
static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

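/* Register only on CPUs that implement the ARMv8 AES instructions. */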
static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");