/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
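	 * e.g. a 32 byte (256 bit) key gives 6 + 32/4 = 14 rounds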
	 */
	return 6 + ctx->key_length / 4;
}

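/*
 * Core transforms, implemented in assembly using the ARMv8 Crypto
 * Extensions (presumably in an accompanying aes-ce-ccm-core.S):
 * - ce_aes_ccm_auth_data:	 fold authenticated data into the CBC-MAC
 * - ce_aes_ccm_encrypt/decrypt: CTR en/decryption combined with the
 *				 per-block CBC-MAC update
 * - ce_aes_ccm_final:		 combine the MAC with the encrypted initial
 *				 counter block to produce the auth tag
 */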
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

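/*
 * CCM permits tag sizes of 4, 6, 8, 10, 12, 14 or 16 bytes: even, and at
 * least 4. The upper bound is enforced by the AEAD API via .maxauthsize
 * below, so only evenness and the lower bound are checked here.
 */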
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

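/*
 * Build the first CBC-MAC block B_0 as defined by RFC 3610: a flags byte,
 * followed by the nonce (15 - L bytes) and the message length encoded in
 * the trailing L bytes. req->iv supplies the flags byte's L' field and the
 * nonce; the length field and the remaining flag bits are filled in here.
 */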
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *                (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

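	/* zero the counter field of req->iv to form A_0, the initial CTR block */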
	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

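/*
 * Fold abytes bytes of input into the CBC-MAC. *macp tracks how far the
 * current MAC block has been filled; a value of AES_BLOCK_SIZE denotes a
 * full block that still has to be encrypted before more data is added.
 */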
static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp)
{
	if (may_use_simd()) {
		kernel_neon_begin();
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
		kernel_neon_end();
	} else {
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes > AES_BLOCK_SIZE) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		} else {
			*macp = 0;
		}
	}
}

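/*
 * MAC the associated data. Per RFC 3610, the AAD is prepended with an
 * encoding of its length: two big-endian bytes when it is shorter than
 * 0xff00, otherwise the marker 0xff 0xfe followed by a four-byte
 * big-endian length.
 */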
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

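/*
 * Scalar fallback used when the NEON unit may not be used: CTR
 * en/decryption and the CBC-MAC update are performed one block at a time
 * with the scalar AES core. Finally, the pending MAC block is encrypted
 * and XORed with the encrypted initial counter block iv0 (A_0) to yield
 * the auth tag.
 */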
static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

		if (nbytes == walk->total && tail > 0) {
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			__aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
					    num_rounds(ctx));
			__aes_arm64_encrypt(ctx->key_enc, mac, mac,
					    num_rounds(ctx));
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);
			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

	if (!err) {
		__aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
		__aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}

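/*
 * Encrypt path: initialize the CBC-MAC from B_0, MAC the associated data,
 * run CTR encryption while folding the plaintext into the MAC, then
 * append the resulting auth tag to the ciphertext in req->dst.
 */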
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, true);

	if (may_use_simd()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

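/*
 * Decrypt path: same structure as ccm_encrypt(), except that req->cryptlen
 * includes the auth tag, the MAC is folded over the recovered plaintext,
 * and the computed tag is checked against the one stored in req->src.
 */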
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, true);

	if (may_use_simd()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

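	/* constant-time comparison guards against timing side channels */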
	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};
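
/*
 * A minimal sketch of how a kernel user might reach this transform once
 * the module is loaded (the variable names are illustrative only):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *
 *	crypto_aead_setkey(tfm, key, 16);	// 16, 24 or 32 byte keys
 *	crypto_aead_setauthsize(tfm, 16);	// even sizes 4..16
 *	...
 *	crypto_free_aead(tfm);
 *
 * The relatively high cra_priority of 300 lets the crypto API prefer
 * this driver over the generic ccm(aes) template when both are present.
 */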

static int __init aes_mod_init(void)
{
	if (!(elf_hwcap & HWCAP_AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");