// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

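/*
 * Dispatch table of assembly entry points.  Each supported key size
 * gets its own set of routines so the AES round count (10, 12 or 14)
 * is fixed at key-setup time instead of being tested on every call.
 */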
struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

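/*
 * Per-transform context: @key holds the expanded key schedule,
 * @key_length is the raw key size in bytes, and @expanded_key_length
 * is the size of the schedule in bytes, used below to locate the
 * final round key for decryption.
 */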
struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};

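/* Assembly implementations, one per key size (see aes_asm.S). */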
extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

static struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);

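/*
 * The expanded-key sizes below are (rounds + 1) * AES_BLOCK_SIZE bytes:
 * 0xb0 = 176 for AES-128 (11 round keys), 0xd0 = 208 for AES-192
 * (13 round keys) and 0xf0 = 240 for AES-256 (15 round keys).
 */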
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}

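/* Single-block entry points backing the plain "aes" cipher below. */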
static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

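/* Round a byte count down to a whole number of 16-byte AES blocks. */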
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))

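/*
 * The bulk handlers below all follow the same pattern: load the key
 * schedule into the FPU register file once, walk the scatterlists in
 * virtually mapped chunks, hand each whole-block run to the assembly
 * routine, and finally write 0 to %fprs so the FPU state is discarded
 * rather than saved.  CRYPTO_TFM_REQ_MAY_SLEEP is cleared so that the
 * walk helpers never sleep while the keys are live in the FPU registers.
 */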
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

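/*
 * The decryption routines consume the key schedule in reverse, so they
 * are handed a pointer just past the last round key rather than the
 * start of the expanded key.
 */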
static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr, block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);

	return err;
}

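/* CBC chaining is done in the assembly; walk.iv is updated in place. */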
static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);

	return err;
}

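/*
 * Handle a final partial block for CTR mode: a single-block ECB
 * encryption of the counter block is exactly the keystream computation,
 * so encrypt the counter, XOR just the remaining bytes into the
 * destination, and bump the counter.
 */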
static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
			      keystream, AES_BLOCK_SIZE);
	crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

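/*
 * blkcipher_walk_virt_block() keeps each iteration at least
 * AES_BLOCK_SIZE bytes long, so a partial block can only appear as the
 * very last chunk, which is handled by ctr_crypt_final() above.
 */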
static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ctr_crypt(&ctx->key[0],
					    (const u64 *)walk.src.virt.addr,
					    (u64 *) walk.dst.virt.addr,
					    block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	fprs_write(0);
	return err;
}

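/*
 * Registered algorithms: the bare "aes" single-block cipher plus the
 * ECB, CBC and CTR blkcipher modes.  CTR advertises a block size of 1
 * because it behaves as a stream cipher.  SPARC_CR_OPCODE_PRIORITY
 * ranks these above the generic C implementations, so a caller of this
 * era, e.g. (illustrative sketch, not code from this file):
 *
 *	struct crypto_blkcipher *tfm =
 *		crypto_alloc_blkcipher("cbc(aes)", 0, 0);
 *	crypto_blkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *
 * would be handed "cbc-aes-sparc64" automatically on capable hardware.
 */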
static struct crypto_alg algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= crypto_aes_encrypt,
			.cia_decrypt		= crypto_aes_decrypt
		}
	}
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
} };

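/*
 * Crypto opcode support is advertised through the ELF hwcap bits; the
 * per-opcode availability is then read from the Compatibility Feature
 * Register (%asr26) and checked for the AES field.
 */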
static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}

static int __init aes_sparc64_mod_init(void)
{
	if (sparc64_has_aes_opcode()) {
		pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
		return crypto_register_algs(algs, ARRAY_SIZE(algs));
	}
	pr_info("sparc64 aes opcodes not available.\n");
	return -ENODEV;
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS_CRYPTO("aes");

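/*
 * crop_devid.c supplies the device-ID table shared by the sparc64
 * crypto-opcode modules so they can be autoloaded on capable cpus.
 */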
#include "crop_devid.c"