// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm with protected keys.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2017,2019
 *   Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 *              Harald Freudenberger <freude@de.ibm.com>
 */

#define KMSG_COMPONENT "paes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/bug.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>
#include <asm/pkey.h>

/*
 * Key blobs smaller/bigger than these defines are rejected
 * by the common code even before the individual setkey function
 * is called. As paes can handle different kinds of key blobs
 * and padding is also possible, the limits need to be generous.
 */
#define PAES_MIN_KEYSIZE 64
#define PAES_MAX_KEYSIZE 256

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct key_blob {
        /*
         * Small keys will be stored in the keybuf. Larger keys are
         * stored in extra allocated memory. In both cases key points
         * to the memory where the key is stored.
         * The code distinguishes by checking keylen against
         * sizeof(keybuf). See the two following helper functions.
         */
        u8 *key;
        u8 keybuf[128];
        unsigned int keylen;
};

static inline int _copy_key_to_kb(struct key_blob *kb,
                                  const u8 *key,
                                  unsigned int keylen)
{
        if (keylen <= sizeof(kb->keybuf))
                kb->key = kb->keybuf;
        else {
                kb->key = kmalloc(keylen, GFP_KERNEL);
                if (!kb->key)
                        return -ENOMEM;
        }
        memcpy(kb->key, key, keylen);
        kb->keylen = keylen;

        return 0;
}

static inline void _free_kb_keybuf(struct key_blob *kb)
{
        if (kb->key && kb->key != kb->keybuf
            && kb->keylen > sizeof(kb->keybuf)) {
                kfree(kb->key);
                kb->key = NULL;
        }
}

struct s390_paes_ctx {
        struct key_blob kb;
        struct pkey_protkey pk;
        unsigned long fc;
};

struct s390_pxts_ctx {
        struct key_blob kb[2];
        struct pkey_protkey pk[2];
        unsigned long fc;
};

static inline int __paes_convert_key(struct key_blob *kb,
                                     struct pkey_protkey *pk)
{
        int i, ret;

        /* try three times in case of failure */
        for (i = 0; i < 3; i++) {
                ret = pkey_keyblob2pkey(kb->key, kb->keylen, pk);
                if (ret == 0)
                        break;
        }

        return ret;
}

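/*
 * __paes_set_key() below converts the stored key blob into a protected
 * key and maps the resulting key type to a CPACF KM function code. A
 * function code of 0 means the required facility is not available on
 * this machine, in which case setkey fails with -EINVAL.
 */
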
static int __paes_set_key(struct s390_paes_ctx *ctx)
{
        unsigned long fc;

        if (__paes_convert_key(&ctx->kb, &ctx->pk))
                return -EINVAL;

        /* Pick the correct function code based on the protected key type */
        fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PAES_128 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KM_PAES_192 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KM_PAES_256 : 0;

        /* Check if the function code is available */
        ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

        return ctx->fc ? 0 : -EINVAL;
}

static int ecb_paes_init(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->kb.key = NULL;

        return 0;
}

static void ecb_paes_exit(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
}

static int ecb_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        int rc;
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
        rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
        if (rc)
                return rc;

        if (__paes_set_key(ctx)) {
                crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        return 0;
}

static int ecb_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes, n, k;
        int ret;

        ret = skcipher_walk_virt(&walk, req, false);
        while ((nbytes = walk.nbytes) != 0) {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                k = cpacf_km(ctx->fc | modifier, ctx->pk.protkey,
                             walk.dst.virt.addr, walk.src.virt.addr, n);
                if (k)
                        ret = skcipher_walk_done(&walk, nbytes - k);
                if (k < n) {
                        if (__paes_set_key(ctx) != 0)
                                return skcipher_walk_done(&walk, -EIO);
                }
        }
        return ret;
}

static int ecb_paes_encrypt(struct skcipher_request *req)
{
        return ecb_paes_crypt(req, 0);
}

static int ecb_paes_decrypt(struct skcipher_request *req)
{
        return ecb_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg ecb_paes_alg = {
        .base.cra_name = "ecb(paes)",
        .base.cra_driver_name = "ecb-paes-s390",
        .base.cra_priority = 401,       /* combo: aes + ecb + 1 */
        .base.cra_blocksize = AES_BLOCK_SIZE,
        .base.cra_ctxsize = sizeof(struct s390_paes_ctx),
        .base.cra_module = THIS_MODULE,
        .base.cra_list = LIST_HEAD_INIT(ecb_paes_alg.base.cra_list),
        .init = ecb_paes_init,
        .exit = ecb_paes_exit,
        .min_keysize = PAES_MIN_KEYSIZE,
        .max_keysize = PAES_MAX_KEYSIZE,
        .setkey = ecb_paes_set_key,
        .encrypt = ecb_paes_encrypt,
        .decrypt = ecb_paes_decrypt,
};

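/*
 * Minimal in-kernel usage sketch (illustrative only, not part of this
 * driver): a caller allocates the skcipher by name and passes a secure
 * key blob instead of a clear AES key. The identifiers seckey, seckey_len,
 * databuf and databuf_len are placeholders for caller-provided data;
 * error handling is shortened.
 *
 *      struct crypto_skcipher *tfm;
 *      struct skcipher_request *req;
 *      struct scatterlist sg;
 *      int rc;
 *
 *      tfm = crypto_alloc_skcipher("ecb(paes)", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      rc = crypto_skcipher_setkey(tfm, seckey, seckey_len);
 *      req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *      skcipher_request_set_callback(req, 0, NULL, NULL);
 *      sg_init_one(&sg, databuf, databuf_len);
 *      skcipher_request_set_crypt(req, &sg, &sg, databuf_len, NULL);
 *      rc = crypto_skcipher_encrypt(req);
 *      skcipher_request_free(req);
 *      crypto_free_skcipher(tfm);
 */
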
static int cbc_paes_init(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->kb.key = NULL;

        return 0;
}

static void cbc_paes_exit(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
}

static int __cbc_paes_set_key(struct s390_paes_ctx *ctx)
{
        unsigned long fc;

        if (__paes_convert_key(&ctx->kb, &ctx->pk))
                return -EINVAL;

        /* Pick the correct function code based on the protected key type */
        fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMC_PAES_128 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMC_PAES_192 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_256) ? CPACF_KMC_PAES_256 : 0;

        /* Check if the function code is available */
        ctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;

        return ctx->fc ? 0 : -EINVAL;
}

static int cbc_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        int rc;
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
        rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
        if (rc)
                return rc;

        if (__cbc_paes_set_key(ctx)) {
                crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        return 0;
}

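/*
 * The KMC instruction takes a parameter block consisting of the chaining
 * value (IV) followed by the protected key. cbc_paes_crypt() below builds
 * this block on the stack, copies the updated IV back after each processed
 * chunk, and rederives the protected key (for example after the CPACF
 * wrapping key changed) whenever the instruction processes fewer bytes
 * than requested.
 */
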
static int cbc_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes, n, k;
        int ret;
        struct {
                u8 iv[AES_BLOCK_SIZE];
                u8 key[MAXPROTKEYSIZE];
        } param;

        ret = skcipher_walk_virt(&walk, req, false);
        if (ret)
                return ret;
        memcpy(param.iv, walk.iv, AES_BLOCK_SIZE);
        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
        while ((nbytes = walk.nbytes) != 0) {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                k = cpacf_kmc(ctx->fc | modifier, &param,
                              walk.dst.virt.addr, walk.src.virt.addr, n);
                if (k) {
                        memcpy(walk.iv, param.iv, AES_BLOCK_SIZE);
                        ret = skcipher_walk_done(&walk, nbytes - k);
                }
                if (k < n) {
                        if (__cbc_paes_set_key(ctx) != 0)
                                return skcipher_walk_done(&walk, -EIO);
                        memcpy(param.key, ctx->pk.protkey, MAXPROTKEYSIZE);
                }
        }
        return ret;
}

static int cbc_paes_encrypt(struct skcipher_request *req)
{
        return cbc_paes_crypt(req, 0);
}

static int cbc_paes_decrypt(struct skcipher_request *req)
{
        return cbc_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg cbc_paes_alg = {
        .base.cra_name = "cbc(paes)",
        .base.cra_driver_name = "cbc-paes-s390",
        .base.cra_priority = 402,       /* ecb-paes-s390 + 1 */
        .base.cra_blocksize = AES_BLOCK_SIZE,
        .base.cra_ctxsize = sizeof(struct s390_paes_ctx),
        .base.cra_module = THIS_MODULE,
        .base.cra_list = LIST_HEAD_INIT(cbc_paes_alg.base.cra_list),
        .init = cbc_paes_init,
        .exit = cbc_paes_exit,
        .min_keysize = PAES_MIN_KEYSIZE,
        .max_keysize = PAES_MAX_KEYSIZE,
        .ivsize = AES_BLOCK_SIZE,
        .setkey = cbc_paes_set_key,
        .encrypt = cbc_paes_encrypt,
        .decrypt = cbc_paes_decrypt,
};

static int xts_paes_init(struct crypto_skcipher *tfm)
{
        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->kb[0].key = NULL;
        ctx->kb[1].key = NULL;

        return 0;
}

static void xts_paes_exit(struct crypto_skcipher *tfm)
{
        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb[0]);
        _free_kb_keybuf(&ctx->kb[1]);
}

static int __xts_paes_set_key(struct s390_pxts_ctx *ctx)
{
        unsigned long fc;

        if (__paes_convert_key(&ctx->kb[0], &ctx->pk[0]) ||
            __paes_convert_key(&ctx->kb[1], &ctx->pk[1]))
                return -EINVAL;

        if (ctx->pk[0].type != ctx->pk[1].type)
                return -EINVAL;

        /* Pick the correct function code based on the protected key type */
        fc = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? CPACF_KM_PXTS_128 :
                (ctx->pk[0].type == PKEY_KEYTYPE_AES_256) ?
                CPACF_KM_PXTS_256 : 0;

        /* Check if the function code is available */
        ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;

        return ctx->fc ? 0 : -EINVAL;
}

static int xts_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int xts_key_len)
{
        int rc;
        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
        u8 ckey[2 * AES_MAX_KEY_SIZE];
        unsigned int ckey_len, key_len;

        if (xts_key_len % 2)
                return -EINVAL;

        key_len = xts_key_len / 2;

        _free_kb_keybuf(&ctx->kb[0]);
        _free_kb_keybuf(&ctx->kb[1]);
        rc = _copy_key_to_kb(&ctx->kb[0], in_key, key_len);
        if (rc)
                return rc;
        rc = _copy_key_to_kb(&ctx->kb[1], in_key + key_len, key_len);
        if (rc)
                return rc;

        if (__xts_paes_set_key(ctx)) {
                crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        /*
         * xts_verify_key verifies the key length is not odd and makes
         * sure that the two keys are not the same. This can be done
         * on the two protected keys as well.
         */
        ckey_len = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ?
                AES_KEYSIZE_128 : AES_KEYSIZE_256;
        memcpy(ckey, ctx->pk[0].protkey, ckey_len);
        memcpy(ckey + ckey_len, ctx->pk[1].protkey, ckey_len);
        return xts_verify_key(tfm, ckey, 2 * ckey_len);
}

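/*
 * For XTS the initial tweak is not computed in software: the PCC
 * instruction is fed the second protected key and the IV and returns the
 * starting XTS parameter, which is then passed to KM together with the
 * first protected key. The retry label allows the whole setup to be
 * redone when a protected key has to be rederived mid-operation.
 */
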
static int xts_paes_crypt(struct skcipher_request *req, unsigned long modifier)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct s390_pxts_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int keylen, offset, nbytes, n, k;
        int ret;
        struct {
                u8 key[MAXPROTKEYSIZE];       /* key + verification pattern */
                u8 tweak[16];
                u8 block[16];
                u8 bit[16];
                u8 xts[16];
        } pcc_param;
        struct {
                u8 key[MAXPROTKEYSIZE];       /* key + verification pattern */
                u8 init[16];
        } xts_param;

        ret = skcipher_walk_virt(&walk, req, false);
        if (ret)
                return ret;
        keylen = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 48 : 64;
        offset = (ctx->pk[0].type == PKEY_KEYTYPE_AES_128) ? 16 : 0;
retry:
        memset(&pcc_param, 0, sizeof(pcc_param));
        memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak));
        memcpy(pcc_param.key + offset, ctx->pk[1].protkey, keylen);
        cpacf_pcc(ctx->fc, pcc_param.key + offset);

        memcpy(xts_param.key + offset, ctx->pk[0].protkey, keylen);
        memcpy(xts_param.init, pcc_param.xts, 16);

        while ((nbytes = walk.nbytes) != 0) {
                /* only use complete blocks */
                n = nbytes & ~(AES_BLOCK_SIZE - 1);
                k = cpacf_km(ctx->fc | modifier, xts_param.key + offset,
                             walk.dst.virt.addr, walk.src.virt.addr, n);
                if (k)
                        ret = skcipher_walk_done(&walk, nbytes - k);
                if (k < n) {
                        if (__xts_paes_set_key(ctx) != 0)
                                return skcipher_walk_done(&walk, -EIO);
                        goto retry;
                }
        }
        return ret;
}

static int xts_paes_encrypt(struct skcipher_request *req)
{
        return xts_paes_crypt(req, 0);
}

static int xts_paes_decrypt(struct skcipher_request *req)
{
        return xts_paes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg xts_paes_alg = {
        .base.cra_name = "xts(paes)",
        .base.cra_driver_name = "xts-paes-s390",
        .base.cra_priority = 402,       /* ecb-paes-s390 + 1 */
        .base.cra_blocksize = AES_BLOCK_SIZE,
        .base.cra_ctxsize = sizeof(struct s390_pxts_ctx),
        .base.cra_module = THIS_MODULE,
        .base.cra_list = LIST_HEAD_INIT(xts_paes_alg.base.cra_list),
        .init = xts_paes_init,
        .exit = xts_paes_exit,
        .min_keysize = 2 * PAES_MIN_KEYSIZE,
        .max_keysize = 2 * PAES_MAX_KEYSIZE,
        .ivsize = AES_BLOCK_SIZE,
        .setkey = xts_paes_set_key,
        .encrypt = xts_paes_encrypt,
        .decrypt = xts_paes_decrypt,
};

static int ctr_paes_init(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        ctx->kb.key = NULL;

        return 0;
}

static void ctr_paes_exit(struct crypto_skcipher *tfm)
{
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
}

static int __ctr_paes_set_key(struct s390_paes_ctx *ctx)
{
        unsigned long fc;

        if (__paes_convert_key(&ctx->kb, &ctx->pk))
                return -EINVAL;

        /* Pick the correct function code based on the protected key type */
        fc = (ctx->pk.type == PKEY_KEYTYPE_AES_128) ? CPACF_KMCTR_PAES_128 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_192) ? CPACF_KMCTR_PAES_192 :
                (ctx->pk.type == PKEY_KEYTYPE_AES_256) ?
                CPACF_KMCTR_PAES_256 : 0;

        /* Check if the function code is available */
        ctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;

        return ctx->fc ? 0 : -EINVAL;
}

static int ctr_paes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
                            unsigned int key_len)
{
        int rc;
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);

        _free_kb_keybuf(&ctx->kb);
        rc = _copy_key_to_kb(&ctx->kb, in_key, key_len);
        if (rc)
                return rc;

        if (__ctr_paes_set_key(ctx)) {
                crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        return 0;
}

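/*
 * CTR mode tries to hand as many counter blocks as possible to a single
 * KMCTR invocation. __ctrblk_init() below fills the shared ctrblk page
 * with consecutive counter values (complete blocks only, at most
 * PAGE_SIZE); ctr_paes_crypt() falls back to single-block operation when
 * the page is contended or the remaining request is small.
 */
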
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
        unsigned int i, n;

        /* only use complete blocks, max. PAGE_SIZE */
        memcpy(ctrptr, iv, AES_BLOCK_SIZE);
        n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
        for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
                memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
                crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
                ctrptr += AES_BLOCK_SIZE;
        }
        return n;
}

static int ctr_paes_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct s390_paes_ctx *ctx = crypto_skcipher_ctx(tfm);
        u8 buf[AES_BLOCK_SIZE], *ctrptr;
        struct skcipher_walk walk;
        unsigned int nbytes, n, k;
        int ret, locked;

        locked = spin_trylock(&ctrblk_lock);

        ret = skcipher_walk_virt(&walk, req, false);
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                n = AES_BLOCK_SIZE;
                if (nbytes >= 2 * AES_BLOCK_SIZE && locked)
                        n = __ctrblk_init(ctrblk, walk.iv, nbytes);
                ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;
                k = cpacf_kmctr(ctx->fc, ctx->pk.protkey, walk.dst.virt.addr,
                                walk.src.virt.addr, n, ctrptr);
                if (k) {
                        if (ctrptr == ctrblk)
                                memcpy(walk.iv, ctrptr + k - AES_BLOCK_SIZE,
                                       AES_BLOCK_SIZE);
                        crypto_inc(walk.iv, AES_BLOCK_SIZE);
                        ret = skcipher_walk_done(&walk, nbytes - n);
                }
                if (k < n) {
                        if (__ctr_paes_set_key(ctx) != 0) {
                                if (locked)
                                        spin_unlock(&ctrblk_lock);
                                return skcipher_walk_done(&walk, -EIO);
                        }
                }
        }
        if (locked)
                spin_unlock(&ctrblk_lock);
        /*
         * final block may be < AES_BLOCK_SIZE, copy only nbytes
         */
        if (nbytes) {
                while (1) {
                        if (cpacf_kmctr(ctx->fc, ctx->pk.protkey, buf,
                                        walk.src.virt.addr, AES_BLOCK_SIZE,
                                        walk.iv) == AES_BLOCK_SIZE)
                                break;
                        if (__ctr_paes_set_key(ctx) != 0)
                                return skcipher_walk_done(&walk, -EIO);
                }
                memcpy(walk.dst.virt.addr, buf, nbytes);
                crypto_inc(walk.iv, AES_BLOCK_SIZE);
                ret = skcipher_walk_done(&walk, 0);
        }

        return ret;
}

static struct skcipher_alg ctr_paes_alg = {
        .base.cra_name = "ctr(paes)",
        .base.cra_driver_name = "ctr-paes-s390",
        .base.cra_priority = 402,       /* ecb-paes-s390 + 1 */
        .base.cra_blocksize = 1,
        .base.cra_ctxsize = sizeof(struct s390_paes_ctx),
        .base.cra_module = THIS_MODULE,
        .base.cra_list = LIST_HEAD_INIT(ctr_paes_alg.base.cra_list),
        .init = ctr_paes_init,
        .exit = ctr_paes_exit,
        .min_keysize = PAES_MIN_KEYSIZE,
        .max_keysize = PAES_MAX_KEYSIZE,
        .ivsize = AES_BLOCK_SIZE,
        .setkey = ctr_paes_set_key,
        .encrypt = ctr_paes_crypt,
        .decrypt = ctr_paes_crypt,
        .chunksize = AES_BLOCK_SIZE,
};

static inline void __crypto_unregister_skcipher(struct skcipher_alg *alg)
{
        if (!list_empty(&alg->base.cra_list))
                crypto_unregister_skcipher(alg);
}

static void paes_s390_fini(void)
{
        if (ctrblk)
                free_page((unsigned long) ctrblk);
        __crypto_unregister_skcipher(&ctr_paes_alg);
        __crypto_unregister_skcipher(&xts_paes_alg);
        __crypto_unregister_skcipher(&cbc_paes_alg);
        __crypto_unregister_skcipher(&ecb_paes_alg);
}

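/*
 * paes_s390_init() below queries the CPACF facilities once and registers
 * each mode only if at least one matching protected-key function code is
 * reported, so the set of available "...(paes)" algorithms mirrors the
 * machine's capabilities. The ctrblk page is allocated only when CTR
 * support is present.
 */
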
static int __init paes_s390_init(void)
{
        int ret;

        /* Query available functions for KM, KMC and KMCTR */
        cpacf_query(CPACF_KM, &km_functions);
        cpacf_query(CPACF_KMC, &kmc_functions);
        cpacf_query(CPACF_KMCTR, &kmctr_functions);

        if (cpacf_test_func(&km_functions, CPACF_KM_PAES_128) ||
            cpacf_test_func(&km_functions, CPACF_KM_PAES_192) ||
            cpacf_test_func(&km_functions, CPACF_KM_PAES_256)) {
                ret = crypto_register_skcipher(&ecb_paes_alg);
                if (ret)
                        goto out_err;
        }

        if (cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_128) ||
            cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_192) ||
            cpacf_test_func(&kmc_functions, CPACF_KMC_PAES_256)) {
                ret = crypto_register_skcipher(&cbc_paes_alg);
                if (ret)
                        goto out_err;
        }

        if (cpacf_test_func(&km_functions, CPACF_KM_PXTS_128) ||
            cpacf_test_func(&km_functions, CPACF_KM_PXTS_256)) {
                ret = crypto_register_skcipher(&xts_paes_alg);
                if (ret)
                        goto out_err;
        }

        if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_128) ||
            cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_192) ||
            cpacf_test_func(&kmctr_functions, CPACF_KMCTR_PAES_256)) {
                ret = crypto_register_skcipher(&ctr_paes_alg);
                if (ret)
                        goto out_err;
                ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
                if (!ctrblk) {
                        ret = -ENOMEM;
                        goto out_err;
                }
        }

        return 0;
out_err:
        paes_s390_fini();
        return ret;
}

module_init(paes_s390_init);
module_exit(paes_s390_fini);

MODULE_ALIAS_CRYPTO("paes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm with protected keys");
MODULE_LICENSE("GPL");

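/*
 * The "paes" crypto alias above is intended to let the crypto API
 * demand-load this module when a protected-key algorithm is requested by
 * name, e.g. via crypto_alloc_skcipher("xts(paes)", 0, 0) from another
 * kernel module or through the AF_ALG interface.
 */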