xref: /openbmc/linux/arch/arm64/crypto/aes-glue.c (revision d2999e1b)
/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/cpufeature.h>

#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS("ecb(aes)");
MODULE_ALIAS("cbc(aes)");
MODULE_ALIAS("ctr(aes)");
MODULE_ALIAS("xts(aes)");
#endif
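
/*
 * This glue file is built twice: with USE_V8_CRYPTO_EXTENSIONS defined it
 * binds to the ARMv8 Crypto Extensions implementation at priority 300,
 * otherwise to the plain NEON implementation at priority 200, so the
 * crypto API prefers the CE version whenever both modules are loaded.
 */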

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 ctr[], int first);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);

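/*
 * The asm routines above share a common convention: rk points to the
 * expanded round keys, rounds is the round count for the key size in use,
 * blocks is the number of 16-byte blocks to process, and first is nonzero
 * only on the first call for a given walk, so that (as the glue code below
 * assumes) the round keys and IV need to be brought into NEON registers
 * only once per kernel_neon_begin()/kernel_neon_end() section.
 */
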
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

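/*
 * XTS takes a double-length key: the first half keys the data cipher
 * (key1) and the second half keys the tweak generator (key2), so setkey
 * simply expands each half independently.
 */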
static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = crypto_aes_expand_key(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = crypto_aes_expand_key(&ctx->key2, &in_key[key_len / 2],
					    key_len / 2);
	if (!ret)
		return 0;

	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

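/*
 * The mode handlers below all follow the same pattern: derive the round
 * count from the key length (6 + keylen / 4 gives 10/12/14 rounds for
 * 128/192/256-bit keys), clear CRYPTO_TFM_REQ_MAY_SLEEP because the walk
 * runs with the NEON unit claimed (and hence with preemption disabled),
 * and hand each run of full blocks to the corresponding asm routine.
 */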
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, first);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();
	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, first);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();
	return err;
}

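/*
 * For CBC the IV is chained through walk.iv: the asm routine picks up the
 * current IV from there and is expected to store the updated IV back, so
 * that the next chunk of the walk continues the chain correctly.
 */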
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
				first);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();
	return err;
}

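/*
 * CTR turns AES into a stream cipher, so it must also handle a trailing
 * partial block: the walk is performed in AES_BLOCK_SIZE granularity, and
 * any leftover bytes are encrypted into an aligned buffer on the stack and
 * copied out afterwards.  A negative block count is the signal (see the
 * comment in the tail handling below) that only part of the final block
 * may be read from the source.
 */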
static int ctr_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

	first = 1;
	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		first = 0;
		nbytes -= blocks * AES_BLOCK_SIZE;
		if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE)
			break;
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	if (nbytes) {
		u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 __aligned(8) tail[AES_BLOCK_SIZE];

		/*
		 * Minimum alignment is 8 bytes, so if nbytes is <= 8, we need
		 * to tell aes_ctr_encrypt() to only read half a block.
		 */
		blocks = (nbytes <= 8) ? -1 : 1;

		aes_ctr_encrypt(tail, tsrc, (u8 *)ctx->key_enc, rounds,
				blocks, walk.iv, first);
		memcpy(tdst, tail, nbytes);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();

	return err;
}

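/*
 * In XTS the tweak is always generated with key2's *encryption* schedule,
 * which is why xts_decrypt() passes key1.key_dec for the data but
 * key2.key_enc for the tweak.
 */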
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_enc, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();

	return err;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_dec, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();

	return err;
}

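/*
 * The first four entries below are plain synchronous blkciphers.  They are
 * registered at priority 0 with "__" prefixed names so that they are not
 * normally selected directly; the four ablkcipher entries that follow wrap
 * them via the ablk_helper code, which runs a request synchronously when
 * the NEON unit may be used and defers it to cryptd otherwise (e.g. when
 * invoked from a context where the FP/SIMD registers cannot be touched).
 */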
static struct crypto_alg aes_algs[] = { {
	.cra_name		= "__ecb-aes-" MODE,
	.cra_driver_name	= "__driver-ecb-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= crypto_aes_set_key,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	},
}, {
	.cra_name		= "__cbc-aes-" MODE,
	.cra_driver_name	= "__driver-cbc-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= crypto_aes_set_key,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
	},
}, {
	.cra_name		= "__ctr-aes-" MODE,
	.cra_driver_name	= "__driver-ctr-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= crypto_aes_set_key,
		.encrypt	= ctr_encrypt,
		.decrypt	= ctr_encrypt,
	},
}, {
	.cra_name		= "__xts-aes-" MODE,
	.cra_driver_name	= "__driver-xts-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_set_key,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
	},
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
} };

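/*
 * A rough sketch (illustrative only, not taken from an actual caller) of
 * how an in-kernel user of this era's crypto API might drive one of the
 * asynchronous algorithms registered above; error handling and completion
 * waiting are omitted:
 *
 *	struct crypto_ablkcipher *tfm;
 *	struct ablkcipher_request *req;
 *
 *	tfm = crypto_alloc_ablkcipher("ctr(aes)", 0, 0);
 *	crypto_ablkcipher_setkey(tfm, key, 16);
 *	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *	ablkcipher_request_set_crypt(req, src_sg, dst_sg, len, iv);
 *	crypto_ablkcipher_encrypt(req);
 *
 *	ablkcipher_request_free(req);
 *	crypto_free_ablkcipher(tfm);
 */
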
static int __init aes_init(void)
{
	return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit aes_exit(void)
{
	crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

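/*
 * The Crypto Extensions build registers itself only on CPUs that advertise
 * the AES feature (module_cpu_feature_match() also provides the alias used
 * for automatic module loading); the NEON build has no such hardware
 * dependency and registers unconditionally.
 */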
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
#endif
module_exit(aes_exit);