/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2017
 *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *		Harald Freudenberger <freude@de.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License (version 2 only)
 * as published by the Free Software Foundation.
 *
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

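/*
 * Per-transform context: 'sk' holds the secure key blob supplied by the
 * caller, 'pk' the protected key derived from it by the pkey layer, and
 * 'fc' the CPACF function code selected for the key type. The pxts
 * variant carries two key pairs for the two XTS keys.
 */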
struct s390_paes_ctx {
	struct pkey_seckey sk;
	struct pkey_protkey pk;
	unsigned long fc;
};

struct s390_pxts_ctx {
	struct pkey_seckey sk[2];
	struct pkey_protkey pk[2];
	unsigned long fc;
};

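/*
 * Convert a secure key blob into a protected key via the pkey layer.
 * The conversion may fail transiently, so it is retried a few times.
 */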
static inline int __paes_convert_key(struct pkey_seckey *sk,
				     struct pkey_protkey *pk)
{
	int i, ret;

	/* try three times in case of failure */
	for (i = 0; i < 3; i++) {
		ret = pkey_skey2pkey(sk, pk);
		if (ret == 0)
			break;
	}

	return ret;
}

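/*
 * (Re-)derive the protected key and map its type (AES-128/192/256) to
 * the matching CPACF KM function code. The code is only accepted if the
 * facility mask queried at module load reports it as available.
 */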
static int __paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (key_len != SECKEYBLOBSIZE)
		return -EINVAL;

	memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
	if (__paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

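/*
 * Walk the scatterlists and feed complete blocks to the CPACF KM
 * instruction. If KM processes fewer bytes than requested, the protected
 * key is assumed to have become unusable (for instance because the
 * underlying wrapping key changed) and is re-derived from the secure key
 * before the remaining data is processed.
 */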
static int ecb_paes_crypt(struct blkcipher_desc *desc,
			  unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
	}
	return ret;
}

static int ecb_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
}

static int ecb_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

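/*
 * Illustrative usage sketch (editorial addition, not taken from the
 * original sources): a kernel user of the synchronous blkcipher API of
 * this kernel generation could drive "ecb(paes)" roughly as follows,
 * where seckey is a SECKEYBLOBSIZE byte secure key blob and sg_in/sg_out
 * are placeholder scatterlists:
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *
 *	tfm = crypto_alloc_blkcipher("ecb(paes)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_blkcipher_setkey(tfm, seckey, SECKEYBLOBSIZE);
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	crypto_blkcipher_encrypt(&desc, sg_out, sg_in, nbytes);
 *	crypto_free_blkcipher(tfm);
 */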
static struct crypto_alg ecb_paes_alg = {
	.cra_name		=	"ecb(paes)",
	.cra_driver_name	=	"ecb-paes-s390",
	.cra_priority		=	400,	/* combo: aes + ecb */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ecb_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	SECKEYBLOBSIZE,
			.max_keysize		=	SECKEYBLOBSIZE,
			.setkey			=	ecb_paes_set_key,
			.encrypt		=	ecb_paes_encrypt,
			.decrypt		=	ecb_paes_decrypt,
		}
	}
};

static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->sk.seckey, in_key, SECKEYBLOBSIZE);
	if (__cbc_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

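/*
 * CBC uses the CPACF KMC instruction, which takes a parameter block of
 * chaining value (IV) followed by the protected key. The updated
 * chaining value is copied back into the walk IV when the loop is done.
 */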
static int cbc_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n, k;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[MAXPROTKEYSIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_kmc(ctx->fc | modifier, &param,
			      walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__cbc_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
		}
	}
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
	return ret;
}

static int cbc_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, 0, &walk);
}

static int cbc_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_paes_alg = {
	.cra_name		=	"cbc(paes)",
	.cra_driver_name	=	"cbc-paes-s390",
	.cra_priority		=	400,	/* combo: aes + cbc */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(cbc_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	SECKEYBLOBSIZE,
			.max_keysize		=	SECKEYBLOBSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_paes_set_key,
			.encrypt		=	cbc_paes_encrypt,
			.decrypt		=	cbc_paes_decrypt,
		}
	}
};

static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk[0], &ctx->pk[0]) ||
	    __paes_convert_key(&ctx->sk[1], &ctx->pk[1]))
		return -EINVAL;

	if (ctx->pk[0].type != ctx->pk[1].type)
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
		(ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KM_PXTS_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

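/*
 * XTS expects two concatenated secure key blobs. After conversion, the
 * two protected keys are additionally passed through xts_check_key() to
 * reject identical key pairs.
 */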
static int xts_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_pxts_ctx *ctx = crypto_tfm_ctx(tfm);
	u8 ckey[2 * AES_MAX_KEY_SIZE];
	unsigned int ckey_len;

	memcpy(ctx->sk[0].seckey, in_key, SECKEYBLOBSIZE);
	memcpy(ctx->sk[1].seckey, in_key + SECKEYBLOBSIZE, SECKEYBLOBSIZE);
	if (__xts_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/*
	 * xts_check_key verifies the key length is not odd and makes
	 * sure that the two keys are not the same. This can be done
	 * on the two protected keys as well
	 */
	ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
		AES_KEYSIZE_128 : AES_KEYSIZE_256;
	memcpy(ckey, ctx->pk[0].protkey, ckey_len);
	memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
	return xts_check_key(tfm, ckey, 2*ckey_len);
}

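/*
 * The initial XTS tweak is computed with the CPACF PCC instruction using
 * the second protected key; the result (pcc_param.xts) seeds the KM
 * parameter block built around the first protected key. If the protected
 * keys become unusable mid-operation they are re-derived and the tweak
 * computation is redone via the retry label.
 */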
static int xts_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_pxts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int keylen, offset, nbytes, n, k;
	int ret;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[MAXPROTKEYSIZE];	/* key + verification pattern */
		u8 init[16];
	} xts_param;

	ret = blkcipher_walk_virt(desc, walk);
	keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
	offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
retry:
	memset(&pcc_param, 0, sizeof(pcc_param));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
	cpacf_pcc(ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
			     walk->dst.virt.addr, walk->src.virt.addr, n);
		if (k)
			ret = blkcipher_walk_done(desc, walk, nbytes - k);
		if (k < n) {
			if (__xts_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
			goto retry;
		}
	}
	return ret;
}

static int xts_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, 0, &walk);
}

static int xts_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg xts_paes_alg = {
	.cra_name		=	"xts(paes)",
	.cra_driver_name	=	"xts-paes-s390",
	.cra_priority		=	400,	/* combo: aes + xts */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_pxts_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(xts_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	2 * SECKEYBLOBSIZE,
			.max_keysize		=	2 * SECKEYBLOBSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	xts_paes_set_key,
			.encrypt		=	xts_paes_encrypt,
			.decrypt		=	xts_paes_decrypt,
		}
	}
};

static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
	unsigned long fc;

	if (__paes_convert_key(&ctx->sk, &ctx->pk))
		return -EINVAL;

	/* Pick the correct function code based on the protected key type */
	fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
		(ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
		CPACF_KMCTR_PAES_256 : 0;

	/* Check if the function code is available */
	ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

	return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct s390_paes_ctx *ctx = crypto_tfm_ctx(tfm);

	memcpy(ctx->sk.seckey, in_key, key_len);
	if (__ctr_paes_set_key(ctx)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	return 0;
}

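/*
 * Fill the shared counter page with consecutive counter values starting
 * from the current IV, so that KMCTR can process up to PAGE_SIZE bytes
 * in a single invocation.
 */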
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

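/*
 * CTR mode only trylocks the global ctrblk_lock: if the page of
 * pre-computed counters is busy, processing falls back to one block per
 * KMCTR invocation instead of sleeping. A trailing partial block is
 * handled by encrypting a full counter block into a stack buffer and
 * copying out only nbytes.
 */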
static int ctr_paes_crypt(struct blkcipher_desc *desc, unsigned long modifier,
			  struct blkcipher_walk *walk)
{
	struct s390_paes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	unsigned int nbytes, n, k;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;
		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
		k = cpacf_kmctr(ctx->fc | modifier, ctx->pk.protkey,
				walk->dst.virt.addr, walk->src.virt.addr,
				n, ctrptr);
		if (k) {
			if (ctrptr == ctrblk)
				memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			ret = blkcipher_walk_done(desc, walk, nbytes - n);
		}
		if (k < n) {
			if (__ctr_paes_set_key(ctx) != 0) {
				if (locked)
					spin_unlock(&ctrblk_lock);
				return blkcipher_walk_done(desc, walk, -EIO);
			}
		}
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		while (1) {
			if (cpacf_kmctr(ctx->fc | modifier,
					ctx->pk.protkey, buf,
					walk->src.virt.addr, AES_BLOCK_SIZE,
					walk->iv) == AES_BLOCK_SIZE)
				break;
			if (__ctr_paes_set_key(ctx) != 0)
				return blkcipher_walk_done(desc, walk, -EIO);
		}
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}

	return ret;
}

static int ctr_paes_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, 0, &walk);
}

static int ctr_paes_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_paes_alg = {
	.cra_name		=	"ctr(paes)",
	.cra_driver_name	=	"ctr-paes-s390",
	.cra_priority		=	400,	/* combo: aes + ctr */
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		=	1,
	.cra_ctxsize		=	sizeof(struct s390_paes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ctr_paes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	SECKEYBLOBSIZE,
			.max_keysize		=	SECKEYBLOBSIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	ctr_paes_set_key,
			.encrypt		=	ctr_paes_encrypt,
			.decrypt		=	ctr_paes_decrypt,
		}
	}
};

static inline void __crypto_unregister_alg(struct crypto_alg *alg)
{
	if (!list_empty(&alg->cra_list))
		crypto_unregister_alg(alg);
}

static void paes_s390_fini(void)
{
	if (ctrblk)
		free_page((unsigned long) ctrblk);
	__crypto_unregister_alg(&ctr_paes_alg);
	__crypto_unregister_alg(&xts_paes_alg);
	__crypto_unregister_alg(&cbc_paes_alg);
	__crypto_unregister_alg(&ecb_paes_alg);
}

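/*
 * Query the CPACF facility masks for KM, KMC and KMCTR once at module
 * load and register only those paes algorithms the machine supports.
 * The CTR variant additionally needs one page for the counter block
 * array.
 */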
static int __init paes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
		ret = crypto_register_alg(&ecb_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
		ret = crypto_register_alg(&cbc_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
		ret = crypto_register_alg(&xts_paes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
		ret = crypto_register_alg(&ctr_paes_alg);
		if (ret)
			goto out_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
	}

	return 0;
out_err:
	paes_s390_fini();
	return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");