/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <crypto/aes.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"

#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_setkey		ce_aes_setkey
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
#define aes_mac_update		ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_setkey		crypto_aes_set_key
#define aes_expandkey		crypto_aes_expand_key
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");
#endif

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

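/*
 * Prototypes for the mode routines implemented in assembly.  'rounds' is
 * 10/12/14 for AES-128/192/256 and 'blocks' is the number of full 16 byte
 * blocks to process.  The 'first' flag is set on the first call inside a
 * kernel_neon_begin()/kernel_neon_end() section; it appears to let the asm
 * code load the round keys (and IV or tweak, where applicable) into NEON
 * registers once and reuse them on subsequent calls.
 */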
/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 ctr[], int first);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			       int blocks, u8 dg[], int enc_before,
			       int enc_after);

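/*
 * XTS needs two independent AES key schedules: key1 for the data and key2
 * for encrypting the tweak.  The user-supplied key is split in half by
 * xts_set_key() below.
 */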
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

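/*
 * Transform and request contexts shared by the CMAC/XCBC/CBC-MAC code:
 * 'consts' holds the two derived subkey blocks used by CMAC and XCBC
 * (plain cbcmac allocates no space for it), 'dg' is the running CBC-MAC
 * digest and 'len' counts how many bytes of the current block have been
 * xor'ed into 'dg'.
 */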
struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};

static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	return aes_setkey(crypto_skcipher_tfm(tfm), in_key, key_len);
}

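/*
 * xts_verify_key() sanity checks the combined key (e.g. rejecting identical
 * key halves in FIPS mode) before the two halves are expanded into separate
 * AES key schedules.
 */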
static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

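/*
 * The skcipher handlers below all follow the same pattern: walk the request
 * with skcipher_walk_virt(), process as many whole blocks as each step
 * provides inside a single kernel_neon_begin()/kernel_neon_end() section,
 * and derive the round count from the key length (16/24/32 byte keys give
 * 6 + keylen/4 = 10/12/14 rounds).
 */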
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	first = 1;
	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
		first = 0;
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

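		/*
		 * A negative block count asks the asm routine to produce a
		 * single block of keystream into 'tail' without consuming
		 * any input; it is then xor'ed over the remaining partial
		 * block below.
		 */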
		aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, rounds,
				blocks, walk.iv, first);
		if (tdst != tsrc)
			memcpy(tdst, tsrc, nbytes);
		crypto_xor(tdst, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_neon_end();

	return err;
}

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_enc, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_dec, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

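/*
 * The "__" prefixed entries are internal-only algorithms; aes_init() wraps
 * them in simd helpers so that the NEON code is only invoked where
 * kernel-mode NEON is usable.  CTR mode is its own inverse, so
 * ctr_encrypt() serves as both .encrypt and .decrypt.
 */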
static struct skcipher_alg aes_algs[] = { {
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO - 1,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
} };

static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int err;

	err = aes_expandkey(&ctx->key, in_key, key_len);
	if (err)
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

	return err;
}

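/*
 * Multiply a 128-bit big-endian value by x in GF(2^128), as required for
 * CMAC subkey derivation: shift left by one bit and, if the top bit was
 * set, reduce by the polynomial x^128 + x^7 + x^2 + x + 1 (the 0x87
 * constant).
 */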
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}

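/*
 * CMAC subkey generation: L = AES_K(0^128) is computed with the normal key
 * schedule, then K1 = L*x and K2 = L*x^2 are derived by doubling in
 * GF(2^128) and kept in ctx->consts for cmac_final().
 */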
static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	u8 *rk = (u8 *)ctx->key.key_enc;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, rk, rounds, 1, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}

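/*
 * XCBC (RFC 3566) key derivation: K1, K2 and K3 are the encryptions of the
 * constant blocks 0x01..01, 0x02..02 and 0x03..03 under the user key.  K1
 * becomes the actual CBC-MAC key (the second cbcmac_setkey() call below),
 * while K2 and K3 land in ctx->consts for use on the final block.
 */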
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	u8 *rk = (u8 *)ctx->key.key_enc;
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], rk, rounds, 1, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], rk, rounds, 2, 0);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}

static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}

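/*
 * Feed data into the CBC-MAC state: full blocks are handed to the asm
 * routine, anything less than a block is xor'ed into ctx->dg and kept for
 * the next call.  The enc_before/enc_after flags tell the asm code whether
 * the digest still has to be run through the cipher before new blocks are
 * mixed in, and whether to encrypt the last full block immediately; a block
 * left unencrypted here is finished off by the next update or by final().
 */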
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	int rounds = 6 + tctx->key.key_length / 4;

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			kernel_neon_begin();
			aes_mac_update(p, tctx->key.key_enc, rounds, blocks,
				       ctx->dg, (ctx->len != 0), (len != 0));
			kernel_neon_end();

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}

static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	int rounds = 6 + tctx->key.key_length / 4;

	kernel_neon_begin();
	aes_mac_update(NULL, tctx->key.key_enc, rounds, 0, ctx->dg, 1, 0);
	kernel_neon_end();

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

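/*
 * CMAC finalisation: a partial final block is padded with a single 0x80
 * byte (10* padding) and masked with the second subkey, while a complete
 * final block is masked with the first subkey; the selected subkey block in
 * ctx->consts is mixed in and the digest is encrypted one last time.
 */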
static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	int rounds = 6 + tctx->key.key_length / 4;
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	kernel_neon_begin();
	aes_mac_update(consts, tctx->key.key_enc, rounds, 1, ctx->dg, 0, 1);
	kernel_neon_end();

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

static struct shash_alg mac_algs[] = { {
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

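	/*
	 * Wrap each internal "__" algorithm in a simd helper (registered
	 * under the same name minus the "__" prefix) so that the NEON code
	 * is only ever entered when kernel-mode NEON is actually usable.
	 */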
	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}

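/*
 * The Crypto Extensions flavour of this driver only loads when the CPU
 * advertises the AES hwcap; the plain NEON flavour registers
 * unconditionally and also exports two of its helpers so that other arm64
 * AES drivers (e.g. the bit-sliced NEON implementation) can reuse them.
 */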
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
#endif
module_exit(aes_exit);