xref: /openbmc/linux/arch/s390/crypto/aes_s390.c (revision 6dfcd296)
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>

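/*
 * Page-sized buffer used to pre-compute runs of consecutive counter
 * blocks for CTR mode. It is shared by all tfms and therefore
 * serialized with ctrblk_lock; callers that fail the trylock process
 * one block at a time instead.
 */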
static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

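/*
 * Per-tfm context. fc holds the CPACF function code chosen at setkey
 * time; fc == 0 means the machine lacks hardware support for the
 * requested key length and the software fallback tfm is used instead.
 */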
struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	int key_len;
	unsigned long fc;
	union {
		struct crypto_skcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

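/*
 * The double-length XTS key is stored in two halves: key feeds the
 * data-encryption function (KM), pcc_key feeds the tweak computation
 * (PCC).
 */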
struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	int key_len;
	unsigned long fc;
	struct crypto_skcipher *fallback;
};

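/*
 * Forward the request flags to the fallback tfm, set the key there
 * and mirror any result flags back, so that key errors are reported
 * as if the fallback were the primary implementation.
 */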
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
		unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

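/*
 * Map the key length to a CPACF function code and verify that the
 * machine implements it; otherwise switch to the software fallback.
 * The ECB, CBC, XTS and CTR setkey functions below follow the same
 * pattern with their respective function codes.
 */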
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KM_AES_128 :
	     (key_len == 24) ? CPACF_KM_AES_192 :
	     (key_len == 32) ? CPACF_KM_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_cip(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(!sctx->fc)) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE);
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(!sctx->fc)) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	cpacf_km(sctx->fc | CPACF_DECRYPT,
		 &sctx->key, out, in, AES_BLOCK_SIZE);
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	300,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_cip,
	.cra_exit		=	fallback_exit_cip,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	aes_encrypt,
			.cia_decrypt		=	aes_decrypt,
		}
	}
};

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	crypto_skcipher_clear_flags(sctx->fallback.blk, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(sctx->fallback.blk, tfm->crt_flags &
						      CRYPTO_TFM_REQ_MASK);

	ret = crypto_skcipher_setkey(sctx->fallback.blk, key, len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= crypto_skcipher_get_flags(sctx->fallback.blk) &
			  CRYPTO_TFM_RES_MASK;

	return ret;
}

static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);

	skcipher_request_set_tfm(req, sctx->fallback.blk);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_decrypt(req);

	skcipher_request_zero(req);
	return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, sctx->fallback.blk);

	skcipher_request_set_tfm(req, sctx->fallback.blk);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_encrypt(req);

	/* clear the on-stack request, as in fallback_blk_dec() */
	skcipher_request_zero(req);
	return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KM_AES_128 :
	     (key_len == 24) ? CPACF_KM_AES_192 :
	     (key_len == 32) ? CPACF_KM_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

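/*
 * Walk the scatterlists and hand each run of complete blocks to the
 * KM instruction in one call; modifier selects between encryption (0)
 * and decryption (CPACF_DECRYPT).
 */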
static int ecb_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(sctx->fc | modifier, sctx->key,
			 walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, 0, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, CPACF_DECRYPT, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_skcipher(name, 0,
						   CRYPTO_ALG_ASYNC |
						   CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_skcipher(sctx->fallback.blk);
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	400,	/* combo: aes + ecb */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.setkey			=	ecb_aes_set_key,
			.encrypt		=	ecb_aes_encrypt,
			.decrypt		=	ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KMC_AES_128 :
	     (key_len == 24) ? CPACF_KMC_AES_192 :
	     (key_len == 32) ? CPACF_KMC_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

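/*
 * KMC takes a parameter block consisting of the IV followed by the
 * key and chains through the IV field, so the updated IV is copied
 * back to the walk after processing, allowing chained requests to
 * continue correctly.
 */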
static int cbc_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_kmc(sctx->fc | modifier, &param,
			  walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, 0, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	400,	/* combo: aes + cbc */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_aes_set_key,
			.encrypt		=	cbc_aes_encrypt,
			.decrypt		=	cbc_aes_decrypt,
		}
	}
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
				   unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(xts_ctx->fallback, tfm->crt_flags &
						     CRYPTO_TFM_REQ_MASK);

	ret = crypto_skcipher_setkey(xts_ctx->fallback, key, len);

	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
	tfm->crt_flags |= crypto_skcipher_get_flags(xts_ctx->fallback) &
			  CRYPTO_TFM_RES_MASK;

	return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
	unsigned int ret;

	skcipher_request_set_tfm(req, xts_ctx->fallback);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_decrypt(req);

	skcipher_request_zero(req);
	return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(tfm);
	SKCIPHER_REQUEST_ON_STACK(req, xts_ctx->fallback);
	unsigned int ret;

	skcipher_request_set_tfm(req, xts_ctx->fallback);
	skcipher_request_set_callback(req, desc->flags, NULL, NULL);
	skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);

	ret = crypto_skcipher_encrypt(req);

	skcipher_request_zero(req);
	return ret;
}

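/*
 * xts_check_key() validates the double-length key (for example, it
 * rejects keys whose two halves are identical when running in FIPS
 * mode) before the key is split into its data-encryption and tweak
 * halves.
 */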
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned long fc;
	int err;

	err = xts_check_key(tfm, in_key, key_len);
	if (err)
		return err;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 32) ? CPACF_KM_XTS_128 :
	     (key_len == 64) ? CPACF_KM_XTS_256 : 0;

	/* Check if the function code is available */
	xts_ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!xts_ctx->fc)
		return xts_fallback_setkey(tfm, in_key, key_len);

	/* Split the XTS key into the two subkeys */
	key_len = key_len / 2;
	xts_ctx->key_len = key_len;
	memcpy(xts_ctx->key, in_key, key_len);
	memcpy(xts_ctx->pcc_key, in_key + key_len, key_len);
	return 0;
}

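/*
 * The PCC instruction computes the initial XTS tweak from the IV and
 * pcc_key; the result (pcc_param.xts) seeds the init field of the KM
 * parameter block. offset is 16 for XTS-128 and 0 for XTS-256: the
 * shorter key is right-aligned within the 32-byte key field so that
 * the key and the fields following it stay contiguous.
 */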
static int xts_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int offset, nbytes, n;
	int ret;
	struct {
		u8 key[32];
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	offset = xts_ctx->key_len & 0x10;
	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, xts_ctx->pcc_key, xts_ctx->key_len);
	cpacf_pcc(xts_ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, xts_ctx->key, xts_ctx->key_len);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(xts_ctx->fc | modifier, xts_param.key + offset,
			 walk->dst.virt.addr, walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!xts_ctx->fc))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, 0, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!xts_ctx->fc))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, CPACF_DECRYPT, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
						  CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}
	return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_skcipher(xts_ctx->fallback);
}

static struct crypto_alg xts_aes_alg = {
	.cra_name		=	"xts(aes)",
	.cra_driver_name	=	"xts-aes-s390",
	.cra_priority		=	400,	/* combo: aes + xts */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_xts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	xts_fallback_init,
	.cra_exit		=	xts_fallback_exit,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	2 * AES_MIN_KEY_SIZE,
			.max_keysize		=	2 * AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	xts_aes_set_key,
			.encrypt		=	xts_aes_encrypt,
			.decrypt		=	xts_aes_decrypt,
		}
	}
};

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KMCTR_AES_128 :
	     (key_len == 24) ? CPACF_KMCTR_AES_192 :
	     (key_len == 32) ? CPACF_KMCTR_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_blk(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

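/*
 * Fill ctrptr with consecutive counter blocks: the IV first, then up
 * to a page's worth of successively incremented copies. Returns the
 * number of bytes of counter material produced, always a multiple of
 * AES_BLOCK_SIZE.
 */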
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

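/*
 * If the shared counter page was obtained (trylock succeeded),
 * process as many blocks per KMCTR invocation as fit into it;
 * otherwise fall back to one block per call using walk->iv directly.
 * A final partial block is handled via a bounce buffer so that only
 * nbytes land in the destination.
 */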
static int ctr_aes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int n, nbytes;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		cpacf_kmctr(sctx->fc | modifier, sctx->key,
			    walk->dst.virt.addr, walk->src.virt.addr,
			    n, ctrptr);
		if (ctrptr == ctrblk)
			memcpy(walk->iv, ctrptr + n - AES_BLOCK_SIZE,
			       AES_BLOCK_SIZE);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		cpacf_kmctr(sctx->fc | modifier, sctx->key,
			    buf, walk->src.virt.addr,
			    AES_BLOCK_SIZE, walk->iv);
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, 0, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(!sctx->fc))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_aes_alg = {
	.cra_name		=	"ctr(aes)",
	.cra_driver_name	=	"ctr-aes-s390",
	.cra_priority		=	400,	/* combo: aes + ctr */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_blk,
	.cra_exit		=	fallback_exit_blk,
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	ctr_aes_set_key,
			.encrypt		=	ctr_aes_encrypt,
			.decrypt		=	ctr_aes_decrypt,
		}
	}
};

static struct crypto_alg *aes_s390_algs_ptr[5];
static int aes_s390_algs_num;

static int aes_s390_register_alg(struct crypto_alg *alg)
{
	int ret;

	ret = crypto_register_alg(alg);
	if (!ret)
		aes_s390_algs_ptr[aes_s390_algs_num++] = alg;
	return ret;
}

static void aes_s390_fini(void)
{
	while (aes_s390_algs_num--)
		crypto_unregister_alg(aes_s390_algs_ptr[aes_s390_algs_num]);
	if (ctrblk)
		free_page((unsigned long) ctrblk);
}

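/*
 * Query the CPACF facilities once at module load and register only
 * those algorithms for which at least one AES function code is
 * available; on any registration failure, tear everything down again.
 */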
static int __init aes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_AES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_AES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_AES_256)) {
		ret = aes_s390_register_alg(&aes_alg);
		if (ret)
			goto out_err;
		ret = aes_s390_register_alg(&ecb_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_AES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_256)) {
		ret = aes_s390_register_alg(&cbc_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_XTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_XTS_256)) {
		ret = aes_s390_register_alg(&xts_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_256)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
		ret = aes_s390_register_alg(&ctr_aes_alg);
		if (ret)
			goto out_err;
	}

	return 0;
out_err:
	aes_s390_fini();
	return ret;
}

module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");