xref: /openbmc/linux/arch/s390/crypto/paes_s390.c (revision 68198dca)
// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2017
 *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *		Harald Freudenberger <freude@de.ibm.com>
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

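/*
 * Shared page of pre-computed counter blocks for CTR mode and the
 * lock that serializes access to it. The cpacf masks cache the
 * KM/KMC/KMCTR function codes reported by the hardware; they are
 * filled once at module init via cpacf_query().
 */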
static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

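/*
 * A context holds the secure key blob (sk), the CPACF protected key
 * derived from it (pk) and the function code (fc) selected for the
 * protected key type. The protected key is only usable as long as
 * the current wrapping key is; it can be re-derived from sk at any
 * time.
 */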
struct s390_paes_ctx {
	struct pkey_seckey sk;
	struct pkey_protkey pk;
	unsigned long fc;
};

struct s390_pxts_ctx {
	struct pkey_seckey sk[2];
	struct pkey_protkey pk[2];
	unsigned long fc;
};

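/*
 * Convert a secure key blob into a CPACF protected key. The
 * conversion may fail transiently (e.g. right after the wrapping
 * key has changed), hence the bounded retry loop.
 */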
static inline int __paes_convert_key(struct pkey_seckey *sk,
				     struct pkey_protkey *pk)
{
	int i, ret;

	/* try three times in case of failure */
	for (i = 0; i < 3; i++) {
		ret = pkey_skey2pkey(sk, pk);
		if (ret == 0)
			break;
	}

	return ret;
}

static int __paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (key_len != SECKEYBLOBSIZE)
		return -EINVAL;

	memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
	if (__paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

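/*
 * The cpacf_km instruction returns the number of bytes it actually
 * processed. A short count (k < n) indicates that the protected key
 * has become invalid, e.g. because the wrapping key changed; in that
 * case the key is re-derived from the secure key and the remainder
 * of the request is retried.
 */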
static int ecb_paes_crypt(struct blkcipher_desc *desc,
			  unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
	}
	return ret;
}

static int ecb_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
}

static int ecb_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ecb_paes_alg = {
	.cra_name		=	"ecb(paes)",
	.cra_driver_name	=	"ecb-paes-s390",
	.cra_priority		=	400,	/* combo: aes + ecb */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ecb_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	SECKEYBLOBSIZE,
			.max_keysize		=	SECKEYBLOBSIZE,
			.setkey			=	ecb_paes_set_key,
			.encrypt		=	ecb_paes_encrypt,
			.decrypt		=	ecb_paes_decrypt,
		}
	}
};

static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
	if (__cbc_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

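/*
 * KMC takes its parameter block as chaining value (IV) followed by
 * the protected key. The hardware updates the chaining value in
 * place, so it is copied back to walk->iv once the loop is done.
 */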
static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_kmc(ctx->fc | modifier, &param,
			      walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__cbc_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
		}
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}

static int cbc_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, 0, &walk);
}

static int cbc_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_paes_alg = {
	.cra_name		=	"cbc(paes)",
	.cra_driver_name	=	"cbc-paes-s390",
	.cra_priority		=	400,	/* combo: aes + cbc */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(cbc_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	SECKEYBLOBSIZE,
			.max_keysize		=	SECKEYBLOBSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_paes_set_key,
			.encrypt		=	cbc_paes_encrypt,
			.decrypt		=	cbc_paes_decrypt,
		}
	}
};

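/*
 * XTS uses two independent keys. Both secure keys must convert to
 * protected keys of the same AES type; the protected-key XTS
 * function codes only cover AES-128 and AES-256.
 */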
static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk[0], &ctx->pk[0]) ||
	    __paes_convert_key(&ctx->sk[1], &ctx->pk[1]))
		return -EINVAL;

	if (ctx->pk[0].type != ctx->pk[1].type)
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KM_PXTS_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
	u8 ckey[2 * AES_MAX_KEY_SIZE];
	unsigned int ckey_len;

	memcpy(ctx->sk[0].seckey, in_key, SECKEYBLOBSIZE);
	memcpy(ctx->sk[1].seckey, in_key + SECKEYBLOBSIZE, SECKEYBLOBSIZE);
	if (__xts_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * xts_check_key verifies the key length is not odd and makes
	 * sure that the two keys are not the same. This can be done
	 * on the two protected keys as well
	 */
	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
		AES_KEYSIZE_128 : AES_KEYSIZE_256;
	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
	return xts_check_key(tfm, ckey, 2*ckey_len);
}

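/*
 * For XTS the PCC instruction derives the initial tweak from the IV
 * and the second protected key; the result (pcc_param.xts) seeds the
 * KM parameter block. AES-128 protected keys are right-aligned in
 * the 64-byte key field, hence the 16-byte offset.
 */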
static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int keylen, offset, nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
retry:
	memset(&pcc_param, 0, sizeof(pcc_param));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
	cpacf_pcc(ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__xts_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			goto retry;
		}
	}
	return ret;
}

static int xts_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, 0, &walk);
}

static int xts_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg xts_paes_alg = {
	.cra_name		=	"xts(paes)",
	.cra_driver_name	=	"xts-paes-s390",
	.cra_priority		=	400,	/* combo: aes + xts */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_pxts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(xts_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	2 * SECKEYBLOBSIZE,
			.max_keysize		=	2 * SECKEYBLOBSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	xts_paes_set_key,
			.encrypt		=	xts_paes_encrypt,
			.decrypt		=	xts_paes_decrypt,
		}
	}
};

static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KMCTR_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->sk.seckey, in_key, key_len);
	if (__ctr_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

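/*
 * Fill the counter-block page with successive counter values so a
 * single KMCTR call can process up to PAGE_SIZE bytes: the first
 * block is the current IV, each following block is the previous one
 * incremented by crypto_inc().
 */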
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

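/*
 * Only one user at a time may use the shared counter-block page. If
 * the trylock fails, fall back to one block per KMCTR call with
 * walk->iv as the counter.
 */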
static int ctr_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int nbytes, n, k;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
				walk->dst.virt.addr, walk->src.virt.addr,
				n, ctrptr);
		if (k) {
			if (ctrptr == ctrblk)
				memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			ret = blkcipher_walk_done(desc, walk, nbytes - n);
		}
		if (k < n) {
			if (__ctr_paes_set_key(ctx) != 0) {
				if (locked)
					spin_unlock(&ctrblk_lock);
				return blkcipher_walk_done(desc, walk, -EIO);
			}
		}
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		while (1) {
			if (cpacf_kmctr(ctx->fc | modifier,
					ctx->pk.protkey, buf,
					walk->src.virt.addr, AES_BLOCK_SIZE,
					walk->iv) == AES_BLOCK_SIZE)
				break;
			if (__ctr_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}

static int ctr_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, 0, &walk);
}

static int ctr_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_paes_alg = {
	.cra_name		=	"ctr(paes)",
	.cra_driver_name	=	"ctr-paes-s390",
	.cra_priority		=	400,	/* combo: aes + ctr */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ctr_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	SECKEYBLOBSIZE,
			.max_keysize		=	SECKEYBLOBSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	ctr_paes_set_key,
			.encrypt		=	ctr_paes_encrypt,
			.decrypt		=	ctr_paes_decrypt,
		}
	}
};

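/*
 * Registration links an alg into the global crypto list, so an alg
 * whose cra_list is still empty was never registered and can be
 * skipped on cleanup.
 */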
static inline void __crypto_unregister_alg(struct crypto_alg *alg)
{
	if (!list_empty(&alg->cra_list))
		crypto_unregister_alg(alg);
}

static void paes_s390_fini(void)
{
	if (ctrblk)
		free_page((unsigned long) ctrblk);
	__crypto_unregister_alg(&ctr_paes_alg);
	__crypto_unregister_alg(&xts_paes_alg);
	__crypto_unregister_alg(&cbc_paes_alg);
	__crypto_unregister_alg(&ecb_paes_alg);
}

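/*
 * Register only the modes whose protected-key function codes are
 * reported by the CPACF query instructions. The counter-block page
 * is allocated only when CTR support is present.
 */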
static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_alg(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_alg(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_alg(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ret = crypto_register_alg(&ctr_paes_alg);
		if (ret)
			goto out_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");