xref: /openbmc/linux/arch/s390/crypto/aes_s390.c (revision 79f08d9e)
1 /*
2  * Cryptographic API.
3  *
4  * s390 implementation of the AES Cipher Algorithm.
5  *
6  * s390 Version:
7  *   Copyright IBM Corp. 2005, 2007
8  *   Author(s): Jan Glauber (jang@de.ibm.com)
 9  *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
10  *
11  * Derived from "crypto/aes_generic.c"
12  *
13  * This program is free software; you can redistribute it and/or modify it
14  * under the terms of the GNU General Public License as published by the Free
15  * Software Foundation; either version 2 of the License, or (at your option)
16  * any later version.
17  *
18  */
19 
20 #define KMSG_COMPONENT "aes_s390"
21 #define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
22 
23 #include <crypto/aes.h>
24 #include <crypto/algapi.h>
25 #include <linux/err.h>
26 #include <linux/module.h>
27 #include <linux/init.h>
28 #include "crypt_s390.h"
29 
30 #define AES_KEYLEN_128		1
31 #define AES_KEYLEN_192		2
32 #define AES_KEYLEN_256		4
33 
34 static u8 *ctrblk;
35 static char keylen_flag;
36 
37 struct s390_aes_ctx {
38 	u8 iv[AES_BLOCK_SIZE];
39 	u8 key[AES_MAX_KEY_SIZE];
40 	long enc;
41 	long dec;
42 	int key_len;
43 	union {
44 		struct crypto_blkcipher *blk;
45 		struct crypto_cipher *cip;
46 	} fallback;
47 };
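
/*
 * Layout note: cbc_aes_crypt() hands &sctx->iv to CPACF KMC as the
 * parameter block, and KMC for AES expects a 16-byte chaining value
 * followed immediately by the key. The adjacency of iv[] and key[] in
 * this struct is therefore load-bearing, not cosmetic.
 */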
48 
49 struct pcc_param {
50 	u8 key[32];
51 	u8 tweak[16];
52 	u8 block[16];
53 	u8 bit[16];
54 	u8 xts[16];
55 };
56 
57 struct s390_xts_ctx {
58 	u8 key[32];
59 	u8 xts_param[16];
60 	struct pcc_param pcc;
61 	long enc;
62 	long dec;
63 	int key_len;
64 	struct crypto_blkcipher *fallback;
65 };
66 
67 /*
68  * Check if the key_len is supported by the HW.
69  * Returns 0 if it is, a positive number if it is not and a software
70  * fallback is required, or a negative number if the key length is invalid.
71  */
72 static int need_fallback(unsigned int key_len)
73 {
74 	switch (key_len) {
75 	case 16:
76 		if (!(keylen_flag & AES_KEYLEN_128))
77 			return 1;
78 		break;
79 	case 24:
80 		if (!(keylen_flag & AES_KEYLEN_192))
81 			return 1;
82 		break;
83 	case 32:
84 		if (!(keylen_flag & AES_KEYLEN_256))
85 			return 1;
86 		break;
87 	default:
88 		return -1;
89 		break;
90 	}
91 	return 0;
92 }
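
/*
 * Worked example of the tri-state contract above: on a z9, whose CPACF
 * only implements AES-128, keylen_flag == AES_KEYLEN_128, so
 *
 *	need_fallback(16) -> 0	(run on the hardware)
 *	need_fallback(24) -> 1	(route to the software fallback)
 *	need_fallback(17) -> -1	(invalid key length, reject)
 */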
93 
94 static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
95 		unsigned int key_len)
96 {
97 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
98 	int ret;
99 
100 	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
101 	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
102 			CRYPTO_TFM_REQ_MASK);
103 
104 	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
105 	if (ret) {
106 		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
107 		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
108 				CRYPTO_TFM_RES_MASK);
109 	}
110 	return ret;
111 }
112 
113 static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
114 		       unsigned int key_len)
115 {
116 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
117 	u32 *flags = &tfm->crt_flags;
118 	int ret;
119 
120 	ret = need_fallback(key_len);
121 	if (ret < 0) {
122 		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
123 		return -EINVAL;
124 	}
125 
126 	sctx->key_len = key_len;
127 	if (!ret) {
128 		memcpy(sctx->key, in_key, key_len);
129 		return 0;
130 	}
131 
132 	return setkey_fallback_cip(tfm, in_key, key_len);
133 }
134 
135 static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
136 {
137 	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
138 
139 	if (unlikely(need_fallback(sctx->key_len))) {
140 		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
141 		return;
142 	}
143 
144 	switch (sctx->key_len) {
145 	case 16:
146 		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
147 			      AES_BLOCK_SIZE);
148 		break;
149 	case 24:
150 		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
151 			      AES_BLOCK_SIZE);
152 		break;
153 	case 32:
154 		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
155 			      AES_BLOCK_SIZE);
156 		break;
157 	}
158 }
159 
160 static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
161 {
162 	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
163 
164 	if (unlikely(need_fallback(sctx->key_len))) {
165 		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
166 		return;
167 	}
168 
169 	switch (sctx->key_len) {
170 	case 16:
171 		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
172 			      AES_BLOCK_SIZE);
173 		break;
174 	case 24:
175 		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
176 			      AES_BLOCK_SIZE);
177 		break;
178 	case 32:
179 		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
180 			      AES_BLOCK_SIZE);
181 		break;
182 	}
183 }
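
/*
 * The KM_* constants used above come from crypt_s390.h and encode the
 * CPACF KM function code for the key size (18/19/20 for AES-128/192/256),
 * plus the decipher modifier bit for the *_DECRYPT variants.
 * crypt_s390_km() returns the number of bytes it processed; the blkcipher
 * paths below check that against the requested length, while for a single
 * 16-byte block the result is ignored here.
 */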
184 
185 static int fallback_init_cip(struct crypto_tfm *tfm)
186 {
187 	const char *name = tfm->__crt_alg->cra_name;
188 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
189 
190 	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
191 			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
192 
193 	if (IS_ERR(sctx->fallback.cip)) {
194 		pr_err("Allocating AES fallback algorithm %s failed\n",
195 		       name);
196 		return PTR_ERR(sctx->fallback.cip);
197 	}
198 
199 	return 0;
200 }
201 
202 static void fallback_exit_cip(struct crypto_tfm *tfm)
203 {
204 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
205 
206 	crypto_free_cipher(sctx->fallback.cip);
207 	sctx->fallback.cip = NULL;
208 }
209 
210 static struct crypto_alg aes_alg = {
211 	.cra_name		=	"aes",
212 	.cra_driver_name	=	"aes-s390",
213 	.cra_priority		=	CRYPT_S390_PRIORITY,
214 	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
215 					CRYPTO_ALG_NEED_FALLBACK,
216 	.cra_blocksize		=	AES_BLOCK_SIZE,
217 	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
218 	.cra_module		=	THIS_MODULE,
219 	.cra_init               =       fallback_init_cip,
220 	.cra_exit               =       fallback_exit_cip,
221 	.cra_u			=	{
222 		.cipher = {
223 			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
224 			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
225 			.cia_setkey		=	aes_set_key,
226 			.cia_encrypt		=	aes_encrypt,
227 			.cia_decrypt		=	aes_decrypt,
228 		}
229 	}
230 };
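
/*
 * Usage sketch (hypothetical caller, not part of this driver): once
 * aes_alg is registered, a kernel user asking for "aes" can be handed
 * this implementation transparently:
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_cipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_cipher_encrypt_one(tfm, out, in);
 *	crypto_free_cipher(tfm);
 *
 * crypto_cipher_encrypt_one() handles exactly one 16-byte block; the
 * blkcipher algorithms below are the interface for longer messages.
 */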
231 
232 static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
233 		unsigned int len)
234 {
235 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
236 	int ret;
237 
238 	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
239 	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
240 			CRYPTO_TFM_REQ_MASK);
241 
242 	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
243 	if (ret) {
244 		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
245 		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
246 				CRYPTO_TFM_RES_MASK);
247 	}
248 	return ret;
249 }
250 
251 static int fallback_blk_dec(struct blkcipher_desc *desc,
252 		struct scatterlist *dst, struct scatterlist *src,
253 		unsigned int nbytes)
254 {
255 	int ret;
256 	struct crypto_blkcipher *tfm;
257 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
258 
259 	tfm = desc->tfm;
260 	desc->tfm = sctx->fallback.blk;
261 
262 	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
263 
264 	desc->tfm = tfm;
265 	return ret;
266 }
267 
268 static int fallback_blk_enc(struct blkcipher_desc *desc,
269 		struct scatterlist *dst, struct scatterlist *src,
270 		unsigned int nbytes)
271 {
272 	int ret;
273 	struct crypto_blkcipher *tfm;
274 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
275 
276 	tfm = desc->tfm;
277 	desc->tfm = sctx->fallback.blk;
278 
279 	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
280 
281 	desc->tfm = tfm;
282 	return ret;
283 }
284 
285 static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
286 			   unsigned int key_len)
287 {
288 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
289 	int ret;
290 
291 	ret = need_fallback(key_len);
292 	if (ret > 0) {
293 		sctx->key_len = key_len;
294 		return setkey_fallback_blk(tfm, in_key, key_len);
295 	}
296 
297 	switch (key_len) {
298 	case 16:
299 		sctx->enc = KM_AES_128_ENCRYPT;
300 		sctx->dec = KM_AES_128_DECRYPT;
301 		break;
302 	case 24:
303 		sctx->enc = KM_AES_192_ENCRYPT;
304 		sctx->dec = KM_AES_192_DECRYPT;
305 		break;
306 	case 32:
307 		sctx->enc = KM_AES_256_ENCRYPT;
308 		sctx->dec = KM_AES_256_DECRYPT;
309 		break;
310 	}
311 
312 	return aes_set_key(tfm, in_key, key_len);
313 }
314 
315 static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
316 			 struct blkcipher_walk *walk)
317 {
318 	int ret = blkcipher_walk_virt(desc, walk);
319 	unsigned int nbytes;
320 
321 	while ((nbytes = walk->nbytes)) {
322 		/* only use complete blocks */
323 		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
324 		u8 *out = walk->dst.virt.addr;
325 		u8 *in = walk->src.virt.addr;
326 
327 		ret = crypt_s390_km(func, param, out, in, n);
328 		if (ret < 0 || ret != n)
329 			return -EIO;
330 
331 		nbytes &= AES_BLOCK_SIZE - 1;
332 		ret = blkcipher_walk_done(desc, walk, nbytes);
333 	}
334 
335 	return ret;
336 }
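
/*
 * Worked example for the masking above, assuming nbytes = 100: with
 * AES_BLOCK_SIZE = 16, n = 100 & ~15 = 96, so six full blocks go to KM
 * in one call and nbytes &= 15 leaves 4 bytes for blkcipher_walk_done()
 * to carry over into the next walk iteration.
 */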
337 
338 static int ecb_aes_encrypt(struct blkcipher_desc *desc,
339 			   struct scatterlist *dst, struct scatterlist *src,
340 			   unsigned int nbytes)
341 {
342 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
343 	struct blkcipher_walk walk;
344 
345 	if (unlikely(need_fallback(sctx->key_len)))
346 		return fallback_blk_enc(desc, dst, src, nbytes);
347 
348 	blkcipher_walk_init(&walk, dst, src, nbytes);
349 	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
350 }
351 
352 static int ecb_aes_decrypt(struct blkcipher_desc *desc,
353 			   struct scatterlist *dst, struct scatterlist *src,
354 			   unsigned int nbytes)
355 {
356 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
357 	struct blkcipher_walk walk;
358 
359 	if (unlikely(need_fallback(sctx->key_len)))
360 		return fallback_blk_dec(desc, dst, src, nbytes);
361 
362 	blkcipher_walk_init(&walk, dst, src, nbytes);
363 	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
364 }
365 
366 static int fallback_init_blk(struct crypto_tfm *tfm)
367 {
368 	const char *name = tfm->__crt_alg->cra_name;
369 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
370 
371 	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
372 			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
373 
374 	if (IS_ERR(sctx->fallback.blk)) {
375 		pr_err("Allocating AES fallback algorithm %s failed\n",
376 		       name);
377 		return PTR_ERR(sctx->fallback.blk);
378 	}
379 
380 	return 0;
381 }
382 
383 static void fallback_exit_blk(struct crypto_tfm *tfm)
384 {
385 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
386 
387 	crypto_free_blkcipher(sctx->fallback.blk);
388 	sctx->fallback.blk = NULL;
389 }
390 
391 static struct crypto_alg ecb_aes_alg = {
392 	.cra_name		=	"ecb(aes)",
393 	.cra_driver_name	=	"ecb-aes-s390",
394 	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
395 	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
396 					CRYPTO_ALG_NEED_FALLBACK,
397 	.cra_blocksize		=	AES_BLOCK_SIZE,
398 	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
399 	.cra_type		=	&crypto_blkcipher_type,
400 	.cra_module		=	THIS_MODULE,
401 	.cra_init		=	fallback_init_blk,
402 	.cra_exit		=	fallback_exit_blk,
403 	.cra_u			=	{
404 		.blkcipher = {
405 			.min_keysize		=	AES_MIN_KEY_SIZE,
406 			.max_keysize		=	AES_MAX_KEY_SIZE,
407 			.setkey			=	ecb_aes_set_key,
408 			.encrypt		=	ecb_aes_encrypt,
409 			.decrypt		=	ecb_aes_decrypt,
410 		}
411 	}
412 };
413 
414 static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
415 			   unsigned int key_len)
416 {
417 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
418 	int ret;
419 
420 	ret = need_fallback(key_len);
421 	if (ret > 0) {
422 		sctx->key_len = key_len;
423 		return setkey_fallback_blk(tfm, in_key, key_len);
424 	}
425 
426 	switch (key_len) {
427 	case 16:
428 		sctx->enc = KMC_AES_128_ENCRYPT;
429 		sctx->dec = KMC_AES_128_DECRYPT;
430 		break;
431 	case 24:
432 		sctx->enc = KMC_AES_192_ENCRYPT;
433 		sctx->dec = KMC_AES_192_DECRYPT;
434 		break;
435 	case 32:
436 		sctx->enc = KMC_AES_256_ENCRYPT;
437 		sctx->dec = KMC_AES_256_DECRYPT;
438 		break;
439 	}
440 
441 	return aes_set_key(tfm, in_key, key_len);
442 }
443 
444 static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
445 			 struct blkcipher_walk *walk)
446 {
447 	int ret = blkcipher_walk_virt(desc, walk);
448 	unsigned int nbytes = walk->nbytes;
449 
450 	if (!nbytes)
451 		goto out;
452 
453 	memcpy(param, walk->iv, AES_BLOCK_SIZE);
454 	do {
455 		/* only use complete blocks */
456 		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
457 		u8 *out = walk->dst.virt.addr;
458 		u8 *in = walk->src.virt.addr;
459 
460 		ret = crypt_s390_kmc(func, param, out, in, n);
461 		if (ret < 0 || ret != n)
462 			return -EIO;
463 
464 		nbytes &= AES_BLOCK_SIZE - 1;
465 		ret = blkcipher_walk_done(desc, walk, nbytes);
466 	} while ((nbytes = walk->nbytes));
467 	memcpy(walk->iv, param, AES_BLOCK_SIZE);
468 
469 out:
470 	return ret;
471 }
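
/*
 * Note on the memcpy pair above: KMC updates the chaining value in its
 * parameter block as it runs, so copying walk->iv in before the loop and
 * back out afterwards makes a walk that spans several scatterlist
 * segments behave as one continuous CBC stream.
 */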
472 
473 static int cbc_aes_encrypt(struct blkcipher_desc *desc,
474 			   struct scatterlist *dst, struct scatterlist *src,
475 			   unsigned int nbytes)
476 {
477 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
478 	struct blkcipher_walk walk;
479 
480 	if (unlikely(need_fallback(sctx->key_len)))
481 		return fallback_blk_enc(desc, dst, src, nbytes);
482 
483 	blkcipher_walk_init(&walk, dst, src, nbytes);
484 	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
485 }
486 
487 static int cbc_aes_decrypt(struct blkcipher_desc *desc,
488 			   struct scatterlist *dst, struct scatterlist *src,
489 			   unsigned int nbytes)
490 {
491 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
492 	struct blkcipher_walk walk;
493 
494 	if (unlikely(need_fallback(sctx->key_len)))
495 		return fallback_blk_dec(desc, dst, src, nbytes);
496 
497 	blkcipher_walk_init(&walk, dst, src, nbytes);
498 	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
499 }
500 
501 static struct crypto_alg cbc_aes_alg = {
502 	.cra_name		=	"cbc(aes)",
503 	.cra_driver_name	=	"cbc-aes-s390",
504 	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
505 	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
506 					CRYPTO_ALG_NEED_FALLBACK,
507 	.cra_blocksize		=	AES_BLOCK_SIZE,
508 	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
509 	.cra_type		=	&crypto_blkcipher_type,
510 	.cra_module		=	THIS_MODULE,
511 	.cra_init		=	fallback_init_blk,
512 	.cra_exit		=	fallback_exit_blk,
513 	.cra_u			=	{
514 		.blkcipher = {
515 			.min_keysize		=	AES_MIN_KEY_SIZE,
516 			.max_keysize		=	AES_MAX_KEY_SIZE,
517 			.ivsize			=	AES_BLOCK_SIZE,
518 			.setkey			=	cbc_aes_set_key,
519 			.encrypt		=	cbc_aes_encrypt,
520 			.decrypt		=	cbc_aes_decrypt,
521 		}
522 	}
523 };
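
/*
 * Usage sketch for the blkcipher interface of this era (hypothetical
 * caller; len must be a multiple of AES_BLOCK_SIZE for CBC):
 *
 *	struct crypto_blkcipher *tfm = crypto_alloc_blkcipher("cbc(aes)", 0, 0);
 *	struct blkcipher_desc desc = { .flags = 0 };
 *	struct scatterlist sg;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	desc.tfm = tfm;
 *	sg_init_one(&sg, buf, len);
 *	crypto_blkcipher_setkey(tfm, key, AES_KEYSIZE_256);
 *	crypto_blkcipher_set_iv(tfm, iv, AES_BLOCK_SIZE);
 *	crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
 *	crypto_free_blkcipher(tfm);
 */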
524 
525 static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
526 				   unsigned int len)
527 {
528 	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
529 	int ret;
530 
531 	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
532 	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
533 			CRYPTO_TFM_REQ_MASK);
534 
535 	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
536 	if (ret) {
537 		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
538 		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
539 				CRYPTO_TFM_RES_MASK);
540 	}
541 	return ret;
542 }
543 
544 static int xts_fallback_decrypt(struct blkcipher_desc *desc,
545 		struct scatterlist *dst, struct scatterlist *src,
546 		unsigned int nbytes)
547 {
548 	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
549 	struct crypto_blkcipher *tfm;
550 	int ret;
551 
552 	tfm = desc->tfm;
553 	desc->tfm = xts_ctx->fallback;
554 
555 	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);
556 
557 	desc->tfm = tfm;
558 	return ret;
559 }
560 
561 static int xts_fallback_encrypt(struct blkcipher_desc *desc,
562 		struct scatterlist *dst, struct scatterlist *src,
563 		unsigned int nbytes)
564 {
565 	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
566 	struct crypto_blkcipher *tfm;
567 	int ret;
568 
569 	tfm = desc->tfm;
570 	desc->tfm = xts_ctx->fallback;
571 
572 	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
573 
574 	desc->tfm = tfm;
575 	return ret;
576 }
577 
578 static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
579 			   unsigned int key_len)
580 {
581 	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
582 	u32 *flags = &tfm->crt_flags;
583 
584 	switch (key_len) {
585 	case 32:
586 		xts_ctx->enc = KM_XTS_128_ENCRYPT;
587 		xts_ctx->dec = KM_XTS_128_DECRYPT;
588 		memcpy(xts_ctx->key + 16, in_key, 16);
589 		memcpy(xts_ctx->pcc.key + 16, in_key + 16, 16);
590 		break;
591 	case 48:
592 		xts_ctx->enc = 0;
593 		xts_ctx->dec = 0;
594 		xts_ctx->key_len = key_len;
595 		return xts_fallback_setkey(tfm, in_key, key_len);
596 	case 64:
597 		xts_ctx->enc = KM_XTS_256_ENCRYPT;
598 		xts_ctx->dec = KM_XTS_256_DECRYPT;
599 		memcpy(xts_ctx->key, in_key, 32);
600 		memcpy(xts_ctx->pcc.key, in_key + 32, 32);
601 		break;
602 	default:
603 		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
604 		return -EINVAL;
605 	}
606 	xts_ctx->key_len = key_len;
607 	return 0;
608 }
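
/*
 * Key split sketch: xts(aes) keys are double length. In the 64-byte case
 * above, bytes 0..31 become the data-encryption key (xts_ctx->key) and
 * bytes 32..63 the tweak key (xts_ctx->pcc.key); the 32-byte case splits
 * 16/16 into the upper halves of the same buffers. A 48-byte key would
 * mean XTS with AES-192, for which no CPACF instruction exists, hence
 * the unconditional software fallback.
 */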
609 
610 static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
611 			 struct s390_xts_ctx *xts_ctx,
612 			 struct blkcipher_walk *walk)
613 {
614 	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
615 	int ret = blkcipher_walk_virt(desc, walk);
616 	unsigned int nbytes = walk->nbytes;
617 	unsigned int n;
618 	u8 *in, *out;
619 	void *param;
620 
621 	if (!nbytes)
622 		goto out;
623 
624 	memset(xts_ctx->pcc.block, 0, sizeof(xts_ctx->pcc.block));
625 	memset(xts_ctx->pcc.bit, 0, sizeof(xts_ctx->pcc.bit));
626 	memset(xts_ctx->pcc.xts, 0, sizeof(xts_ctx->pcc.xts));
627 	memcpy(xts_ctx->pcc.tweak, walk->iv, sizeof(xts_ctx->pcc.tweak));
628 	param = xts_ctx->pcc.key + offset;
629 	ret = crypt_s390_pcc(func, param);
630 	if (ret < 0)
631 		return -EIO;
632 
633 	memcpy(xts_ctx->xts_param, xts_ctx->pcc.xts, 16);
634 	param = xts_ctx->key + offset;
635 	do {
636 		/* only use complete blocks */
637 		n = nbytes & ~(AES_BLOCK_SIZE - 1);
638 		out = walk->dst.virt.addr;
639 		in = walk->src.virt.addr;
640 
641 		ret = crypt_s390_km(func, param, out, in, n);
642 		if (ret < 0 || ret != n)
643 			return -EIO;
644 
645 		nbytes &= AES_BLOCK_SIZE - 1;
646 		ret = blkcipher_walk_done(desc, walk, nbytes);
647 	} while ((nbytes = walk->nbytes));
648 out:
649 	return ret;
650 }
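
/*
 * The offset arithmetic above picks the populated slice of the 32-byte
 * key buffers: key_len = 64 gives (64 >> 1) & 0x10 = 0 (full AES-256
 * keys), key_len = 32 gives (32 >> 1) & 0x10 = 16 (AES-128 keys stored
 * in the upper halves by xts_aes_set_key()). PCC derives the initial
 * tweak from pcc.tweak, and the resulting pcc.xts value seeds the KM
 * parameter block via xts_param.
 */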
651 
652 static int xts_aes_encrypt(struct blkcipher_desc *desc,
653 			   struct scatterlist *dst, struct scatterlist *src,
654 			   unsigned int nbytes)
655 {
656 	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
657 	struct blkcipher_walk walk;
658 
659 	if (unlikely(xts_ctx->key_len == 48))
660 		return xts_fallback_encrypt(desc, dst, src, nbytes);
661 
662 	blkcipher_walk_init(&walk, dst, src, nbytes);
663 	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
664 }
665 
666 static int xts_aes_decrypt(struct blkcipher_desc *desc,
667 			   struct scatterlist *dst, struct scatterlist *src,
668 			   unsigned int nbytes)
669 {
670 	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
671 	struct blkcipher_walk walk;
672 
673 	if (unlikely(xts_ctx->key_len == 48))
674 		return xts_fallback_decrypt(desc, dst, src, nbytes);
675 
676 	blkcipher_walk_init(&walk, dst, src, nbytes);
677 	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
678 }
679 
680 static int xts_fallback_init(struct crypto_tfm *tfm)
681 {
682 	const char *name = tfm->__crt_alg->cra_name;
683 	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
684 
685 	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
686 			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
687 
688 	if (IS_ERR(xts_ctx->fallback)) {
689 		pr_err("Allocating XTS fallback algorithm %s failed\n",
690 		       name);
691 		return PTR_ERR(xts_ctx->fallback);
692 	}
693 	return 0;
694 }
695 
696 static void xts_fallback_exit(struct crypto_tfm *tfm)
697 {
698 	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
699 
700 	crypto_free_blkcipher(xts_ctx->fallback);
701 	xts_ctx->fallback = NULL;
702 }
703 
704 static struct crypto_alg xts_aes_alg = {
705 	.cra_name		=	"xts(aes)",
706 	.cra_driver_name	=	"xts-aes-s390",
707 	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
708 	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
709 					CRYPTO_ALG_NEED_FALLBACK,
710 	.cra_blocksize		=	AES_BLOCK_SIZE,
711 	.cra_ctxsize		=	sizeof(struct s390_xts_ctx),
712 	.cra_type		=	&crypto_blkcipher_type,
713 	.cra_module		=	THIS_MODULE,
714 	.cra_init		=	xts_fallback_init,
715 	.cra_exit		=	xts_fallback_exit,
716 	.cra_u			=	{
717 		.blkcipher = {
718 			.min_keysize		=	2 * AES_MIN_KEY_SIZE,
719 			.max_keysize		=	2 * AES_MAX_KEY_SIZE,
720 			.ivsize			=	AES_BLOCK_SIZE,
721 			.setkey			=	xts_aes_set_key,
722 			.encrypt		=	xts_aes_encrypt,
723 			.decrypt		=	xts_aes_decrypt,
724 		}
725 	}
726 };
727 
728 static int xts_aes_alg_reg;
729 
730 static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
731 			   unsigned int key_len)
732 {
733 	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
734 
735 	switch (key_len) {
736 	case 16:
737 		sctx->enc = KMCTR_AES_128_ENCRYPT;
738 		sctx->dec = KMCTR_AES_128_DECRYPT;
739 		break;
740 	case 24:
741 		sctx->enc = KMCTR_AES_192_ENCRYPT;
742 		sctx->dec = KMCTR_AES_192_DECRYPT;
743 		break;
744 	case 32:
745 		sctx->enc = KMCTR_AES_256_ENCRYPT;
746 		sctx->dec = KMCTR_AES_256_DECRYPT;
747 		break;
748 	}
749 
750 	return aes_set_key(tfm, in_key, key_len);
751 }
752 
753 static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
754 			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
755 {
756 	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
757 	unsigned int i, n, nbytes;
758 	u8 buf[AES_BLOCK_SIZE];
759 	u8 *out, *in;
760 
761 	if (!walk->nbytes)
762 		return ret;
763 
764 	memcpy(ctrblk, walk->iv, AES_BLOCK_SIZE);
765 	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
766 		out = walk->dst.virt.addr;
767 		in = walk->src.virt.addr;
768 		while (nbytes >= AES_BLOCK_SIZE) {
769 			/* only use complete blocks, max. PAGE_SIZE */
770 			n = (nbytes > PAGE_SIZE) ? PAGE_SIZE :
771 						 nbytes & ~(AES_BLOCK_SIZE - 1);
772 			for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
773 				memcpy(ctrblk + i, ctrblk + i - AES_BLOCK_SIZE,
774 				       AES_BLOCK_SIZE);
775 				crypto_inc(ctrblk + i, AES_BLOCK_SIZE);
776 			}
777 			ret = crypt_s390_kmctr(func, sctx->key, out, in, n, ctrblk);
778 			if (ret < 0 || ret != n)
779 				return -EIO;
780 			if (n > AES_BLOCK_SIZE)
781 				memcpy(ctrblk, ctrblk + n - AES_BLOCK_SIZE,
782 				       AES_BLOCK_SIZE);
783 			crypto_inc(ctrblk, AES_BLOCK_SIZE);
784 			out += n;
785 			in += n;
786 			nbytes -= n;
787 		}
788 		ret = blkcipher_walk_done(desc, walk, nbytes);
789 	}
790 	/*
791 	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
792 	 */
793 	if (nbytes) {
794 		out = walk->dst.virt.addr;
795 		in = walk->src.virt.addr;
796 		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
797 				       AES_BLOCK_SIZE, ctrblk);
798 		if (ret < 0 || ret != AES_BLOCK_SIZE)
799 			return -EIO;
800 		memcpy(out, buf, nbytes);
801 		crypto_inc(ctrblk, AES_BLOCK_SIZE);
802 		ret = blkcipher_walk_done(desc, walk, 0);
803 	}
804 	memcpy(walk->iv, ctrblk, AES_BLOCK_SIZE);
805 	return ret;
806 }
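
/*
 * Counter layout sketch, assuming a 4 KiB page and AES_BLOCK_SIZE = 16:
 * the inner loop materializes up to 256 consecutive counter values,
 *
 *	ctrblk + 0    : IV
 *	ctrblk + 16   : IV + 1
 *	...
 *	ctrblk + 4080 : IV + 255
 *
 * so a single KMCTR call can cover a whole page of data. Note that
 * ctrblk is one page shared by every tfm of this driver; this revision
 * assumes CTR requests are not issued concurrently (later kernels
 * serialize access with a lock).
 */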
807 
808 static int ctr_aes_encrypt(struct blkcipher_desc *desc,
809 			   struct scatterlist *dst, struct scatterlist *src,
810 			   unsigned int nbytes)
811 {
812 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
813 	struct blkcipher_walk walk;
814 
815 	blkcipher_walk_init(&walk, dst, src, nbytes);
816 	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
817 }
818 
819 static int ctr_aes_decrypt(struct blkcipher_desc *desc,
820 			   struct scatterlist *dst, struct scatterlist *src,
821 			   unsigned int nbytes)
822 {
823 	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
824 	struct blkcipher_walk walk;
825 
826 	blkcipher_walk_init(&walk, dst, src, nbytes);
827 	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
828 }
829 
830 static struct crypto_alg ctr_aes_alg = {
831 	.cra_name		=	"ctr(aes)",
832 	.cra_driver_name	=	"ctr-aes-s390",
833 	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
834 	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
835 	.cra_blocksize		=	1,
836 	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
837 	.cra_type		=	&crypto_blkcipher_type,
838 	.cra_module		=	THIS_MODULE,
839 	.cra_u			=	{
840 		.blkcipher = {
841 			.min_keysize		=	AES_MIN_KEY_SIZE,
842 			.max_keysize		=	AES_MAX_KEY_SIZE,
843 			.ivsize			=	AES_BLOCK_SIZE,
844 			.setkey			=	ctr_aes_set_key,
845 			.encrypt		=	ctr_aes_encrypt,
846 			.decrypt		=	ctr_aes_decrypt,
847 		}
848 	}
849 };
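
/*
 * cra_blocksize is 1 here rather than AES_BLOCK_SIZE: CTR mode turns AES
 * into a stream cipher, so requests of any byte length are valid and the
 * trailing partial block is handled by the buf[] path in ctr_aes_crypt().
 */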
850 
851 static int ctr_aes_alg_reg;
852 
853 static int __init aes_s390_init(void)
854 {
855 	int ret;
856 
857 	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
858 		keylen_flag |= AES_KEYLEN_128;
859 	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
860 		keylen_flag |= AES_KEYLEN_192;
861 	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
862 		keylen_flag |= AES_KEYLEN_256;
863 
864 	if (!keylen_flag)
865 		return -EOPNOTSUPP;
866 
867 	/* z9 109 and z9 BC/EC only support a 128-bit key length */
868 	if (keylen_flag == AES_KEYLEN_128)
869 		pr_info("AES hardware acceleration is only available for"
870 			" 128-bit keys\n");
871 
872 	ret = crypto_register_alg(&aes_alg);
873 	if (ret)
874 		goto aes_err;
875 
876 	ret = crypto_register_alg(&ecb_aes_alg);
877 	if (ret)
878 		goto ecb_aes_err;
879 
880 	ret = crypto_register_alg(&cbc_aes_alg);
881 	if (ret)
882 		goto cbc_aes_err;
883 
884 	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
885 			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
886 	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
887 			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
888 		ret = crypto_register_alg(&xts_aes_alg);
889 		if (ret)
890 			goto xts_aes_err;
891 		xts_aes_alg_reg = 1;
892 	}
893 
894 	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
895 				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
896 	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
897 				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
898 	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
899 				CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
900 		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
901 		if (!ctrblk) {
902 			ret = -ENOMEM;
903 			goto ctr_aes_err;
904 		}
905 		ret = crypto_register_alg(&ctr_aes_alg);
906 		if (ret) {
907 			free_page((unsigned long) ctrblk);
908 			goto ctr_aes_err;
909 		}
910 		ctr_aes_alg_reg = 1;
911 	}
912 
913 out:
914 	return ret;
915 
916 ctr_aes_err:
917 	crypto_unregister_alg(&xts_aes_alg);
918 xts_aes_err:
919 	crypto_unregister_alg(&cbc_aes_alg);
920 cbc_aes_err:
921 	crypto_unregister_alg(&ecb_aes_alg);
922 ecb_aes_err:
923 	crypto_unregister_alg(&aes_alg);
924 aes_err:
925 	goto out;
926 }
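
/*
 * The unwind labels above fall through in reverse registration order, so
 * a failure at any step unregisters what was registered before it; the
 * success path jumps over the ladder via the out label.
 */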
927 
928 static void __exit aes_s390_fini(void)
929 {
930 	if (ctr_aes_alg_reg) {
931 		crypto_unregister_alg(&ctr_aes_alg);
932 		free_page((unsigned long) ctrblk);
933 	}
934 	if (xts_aes_alg_reg)
935 		crypto_unregister_alg(&xts_aes_alg);
936 	crypto_unregister_alg(&cbc_aes_alg);
937 	crypto_unregister_alg(&ecb_aes_alg);
938 	crypto_unregister_alg(&aes_alg);
939 }
940 
941 module_init(aes_s390_init);
942 module_exit(aes_s390_fini);
943 
944 MODULE_ALIAS("aes-all");
945 
946 MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
947 MODULE_LICENSE("GPL");
948