/*
 * Cryptographic API.
 *
 * s390 implementation of the DES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2003, 2011
 * Author(s): Thomas Spatzier
 *	      Jan Glauber (jan.glauber@de.ibm.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/des.h>

#include "crypt_s390.h"

#define DES3_KEY_SIZE	(3 * DES_KEY_SIZE)

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

struct s390_des_ctx {
	u8 iv[DES_BLOCK_SIZE];
	u8 key[DES3_KEY_SIZE];
};

static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	u32 tmp[DES_EXPKEY_WORDS];

	/* check for weak keys */
	if (!des_ekey(tmp, key) && (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	memcpy(ctx->key, key, key_len);
	return 0;
}

static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_DEA_ENCRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
}

static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_DEA_DECRYPT, ctx->key, out, in, DES_BLOCK_SIZE);
}

static struct crypto_alg des_alg = {
	.cra_name = "des",
	.cra_driver_name = "des-s390",
	.cra_priority = CRYPT_S390_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = DES_KEY_SIZE,
			.cia_max_keysize = DES_KEY_SIZE,
			.cia_setkey = des_setkey,
			.cia_encrypt = des_encrypt,
			.cia_decrypt = des_decrypt,
		}
	}
};

static int ecb_desall_crypt(struct blkcipher_desc *desc, long func,
			    u8 *key, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, key, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= DES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}
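
/*
 * CBC is handled by the CPACF KMC instruction.  The on-stack "param"
 * struct built below mirrors the parameter block that KMC consumes:
 * the 8-byte chaining value (IV) followed by the key material.  Using
 * a local copy instead of the tfm context means concurrent requests on
 * the same tfm do not share a chaining value.
 */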
static int cbc_desall_crypt(struct blkcipher_desc *desc, long func,
			    struct blkcipher_walk *walk)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[DES_BLOCK_SIZE];
		u8 key[DES3_KEY_SIZE];
	} param;

	if (!nbytes)
		goto out;

	memcpy(param.iv, walk->iv, DES_BLOCK_SIZE);
	memcpy(param.key, ctx->key, DES3_KEY_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(DES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= DES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, DES_BLOCK_SIZE);

out:
	return ret;
}

static int ecb_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_DEA_ENCRYPT, ctx->key, &walk);
}

static int ecb_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_DEA_DECRYPT, ctx->key, &walk);
}

static struct crypto_alg ecb_des_alg = {
	.cra_name = "ecb(des)",
	.cra_driver_name = "ecb-des-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = des_setkey,
			.encrypt = ecb_des_encrypt,
			.decrypt = ecb_des_decrypt,
		}
	}
};

static int cbc_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_DEA_ENCRYPT, &walk);
}

static int cbc_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_DEA_DECRYPT, &walk);
}

static struct crypto_alg cbc_des_alg = {
	.cra_name = "cbc(des)",
	.cra_driver_name = "cbc-des-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = des_setkey,
			.encrypt = cbc_des_encrypt,
			.decrypt = cbc_des_decrypt,
		}
	}
};

/*
 * RFC2451:
 *
 *   For DES-EDE3, there is no known need to reject weak or
 *   complementation keys.  Any weakness is obviated by the use of
 *   multiple keys.
 *
 *   However, if the first two or last two independent 64-bit keys are
 *   equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the
 *   same as DES.  Implementers MUST reject keys that exhibit this
 *   property.
 *
 */
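
/*
 * Illustration (not part of the driver): with CRYPTO_TFM_REQ_WEAK_KEY
 * set, a 24-byte key whose first two 8-byte thirds are identical
 * (k1 == k2) is rejected by des3_setkey() below, e.g.:
 *
 *	u8 key[DES3_KEY_SIZE] = { [0 ... 15] = 0x01, [16 ... 23] = 0x02 };
 *
 *	crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 *	err = crypto_cipher_setkey(tfm, key, DES3_KEY_SIZE);
 *		-> err == -EINVAL, CRYPTO_TFM_RES_WEAK_KEY set in crt_flags
 *
 * Without the request flag such a key is accepted and copied verbatim.
 */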
static int des3_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int key_len)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	if (!(crypto_memneq(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
	      crypto_memneq(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
			    DES_KEY_SIZE)) &&
	    (*flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		*flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}
	memcpy(ctx->key, key, key_len);
	return 0;
}

static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_TDEA_192_ENCRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
}

static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	crypt_s390_km(KM_TDEA_192_DECRYPT, ctx->key, dst, src, DES_BLOCK_SIZE);
}

static struct crypto_alg des3_alg = {
	.cra_name = "des3_ede",
	.cra_driver_name = "des3_ede-s390",
	.cra_priority = CRYPT_S390_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_module = THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize = DES3_KEY_SIZE,
			.cia_max_keysize = DES3_KEY_SIZE,
			.cia_setkey = des3_setkey,
			.cia_encrypt = des3_encrypt,
			.cia_decrypt = des3_decrypt,
		}
	}
};

static int ecb_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_TDEA_192_ENCRYPT, ctx->key, &walk);
}

static int ecb_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, KM_TDEA_192_DECRYPT, ctx->key, &walk);
}

static struct crypto_alg ecb_des3_alg = {
	.cra_name = "ecb(des3_ede)",
	.cra_driver_name = "ecb-des3_ede-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES3_KEY_SIZE,
			.max_keysize = DES3_KEY_SIZE,
			.setkey = des3_setkey,
			.encrypt = ecb_des3_encrypt,
			.decrypt = ecb_des3_decrypt,
		}
	}
};

static int cbc_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_TDEA_192_ENCRYPT, &walk);
}

static int cbc_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, KMC_TDEA_192_DECRYPT, &walk);
}

static struct crypto_alg cbc_des3_alg = {
	.cra_name = "cbc(des3_ede)",
	.cra_driver_name = "cbc-des3_ede-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = DES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES3_KEY_SIZE,
			.max_keysize = DES3_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = des3_setkey,
			.encrypt = cbc_des3_encrypt,
			.decrypt = cbc_des3_decrypt,
		}
	}
};
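
/*
 * CTR mode is implemented with the CPACF KMCTR instruction.  To let
 * the hardware process several blocks per invocation, a shared,
 * page-sized counter buffer (ctrblk) is pre-filled with successive
 * counter values; it is protected by ctrblk_lock.  If the lock is
 * contended, the code falls back to a single on-stack counter block
 * and one block per KMCTR call.
 */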
static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
{
	unsigned int i, n;

	/* align to block size, max. PAGE_SIZE */
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(DES_BLOCK_SIZE - 1);
	for (i = DES_BLOCK_SIZE; i < n; i += DES_BLOCK_SIZE) {
		memcpy(ctrptr + i, ctrptr + i - DES_BLOCK_SIZE, DES_BLOCK_SIZE);
		crypto_inc(ctrptr + i, DES_BLOCK_SIZE);
	}
	return n;
}

static int ctr_desall_crypt(struct blkcipher_desc *desc, long func,
			    struct s390_des_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
	unsigned int n, nbytes;
	u8 buf[DES_BLOCK_SIZE], ctrbuf[DES_BLOCK_SIZE];
	u8 *out, *in, *ctrptr = ctrbuf;

	if (!walk->nbytes)
		return ret;

	if (spin_trylock(&ctrblk_lock))
		ctrptr = ctrblk;

	memcpy(ctrptr, walk->iv, DES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= DES_BLOCK_SIZE) {
			if (ctrptr == ctrblk)
				n = __ctrblk_init(ctrptr, nbytes);
			else
				n = DES_BLOCK_SIZE;
			ret = crypt_s390_kmctr(func, ctx->key, out, in,
					       n, ctrptr);
			if (ret < 0 || ret != n) {
				if (ctrptr == ctrblk)
					spin_unlock(&ctrblk_lock);
				return -EIO;
			}
			if (n > DES_BLOCK_SIZE)
				memcpy(ctrptr, ctrptr + n - DES_BLOCK_SIZE,
				       DES_BLOCK_SIZE);
			crypto_inc(ctrptr, DES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	if (ctrptr == ctrblk) {
		if (nbytes)
			memcpy(ctrbuf, ctrptr, DES_BLOCK_SIZE);
		else
			memcpy(walk->iv, ctrptr, DES_BLOCK_SIZE);
		spin_unlock(&ctrblk_lock);
	} else {
		if (!nbytes)
			memcpy(walk->iv, ctrptr, DES_BLOCK_SIZE);
	}
	/* final block may be < DES_BLOCK_SIZE, copy only nbytes */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, ctx->key, buf, in,
				       DES_BLOCK_SIZE, ctrbuf);
		if (ret < 0 || ret != DES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrbuf, DES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
		memcpy(walk->iv, ctrbuf, DES_BLOCK_SIZE);
	}
	return ret;
}

static int ctr_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_DEA_ENCRYPT, ctx, &walk);
}

static int ctr_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_DEA_DECRYPT, ctx, &walk);
}
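
/*
 * In CTR mode the cipher behaves like a stream cipher, so the CTR
 * algorithms register with cra_blocksize = 1; the IV (ivsize ==
 * DES_BLOCK_SIZE) holds the initial counter block.
 */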
static struct crypto_alg ctr_des_alg = {
	.cra_name = "ctr(des)",
	.cra_driver_name = "ctr-des-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = des_setkey,
			.encrypt = ctr_des_encrypt,
			.decrypt = ctr_des_decrypt,
		}
	}
};

static int ctr_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_TDEA_192_ENCRYPT, ctx, &walk);
}

static int ctr_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, KMCTR_TDEA_192_DECRYPT, ctx, &walk);
}

static struct crypto_alg ctr_des3_alg = {
	.cra_name = "ctr(des3_ede)",
	.cra_driver_name = "ctr-des3_ede-s390",
	.cra_priority = CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct s390_des_ctx),
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = DES3_KEY_SIZE,
			.max_keysize = DES3_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = des3_setkey,
			.encrypt = ctr_des3_encrypt,
			.decrypt = ctr_des3_decrypt,
		}
	}
};
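
/*
 * The KM/KMC functions for DEA and TDEA-192 are part of the base
 * message-security assist (MSA), so the module refuses to load when
 * they are missing.  KMCTR arrived with MSA extension 4, so the CTR
 * algorithms and the shared counter page are only set up when it is
 * available.
 */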
static int __init des_s390_init(void)
{
	int ret;

	if (!crypt_s390_func_available(KM_DEA_ENCRYPT, CRYPT_S390_MSA) ||
	    !crypt_s390_func_available(KM_TDEA_192_ENCRYPT, CRYPT_S390_MSA))
		return -EOPNOTSUPP;

	ret = crypto_register_alg(&des_alg);
	if (ret)
		goto des_err;
	ret = crypto_register_alg(&ecb_des_alg);
	if (ret)
		goto ecb_des_err;
	ret = crypto_register_alg(&cbc_des_alg);
	if (ret)
		goto cbc_des_err;
	ret = crypto_register_alg(&des3_alg);
	if (ret)
		goto des3_err;
	ret = crypto_register_alg(&ecb_des3_alg);
	if (ret)
		goto ecb_des3_err;
	ret = crypto_register_alg(&cbc_des3_alg);
	if (ret)
		goto cbc_des3_err;

	if (crypt_s390_func_available(KMCTR_DEA_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_TDEA_192_ENCRYPT,
				      CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&ctr_des_alg);
		if (ret)
			goto ctr_des_err;
		ret = crypto_register_alg(&ctr_des3_alg);
		if (ret)
			goto ctr_des3_err;
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_mem_err;
		}
	}
out:
	return ret;

ctr_mem_err:
	crypto_unregister_alg(&ctr_des3_alg);
ctr_des3_err:
	crypto_unregister_alg(&ctr_des_alg);
ctr_des_err:
	crypto_unregister_alg(&cbc_des3_alg);
cbc_des3_err:
	crypto_unregister_alg(&ecb_des3_alg);
ecb_des3_err:
	crypto_unregister_alg(&des3_alg);
des3_err:
	crypto_unregister_alg(&cbc_des_alg);
cbc_des_err:
	crypto_unregister_alg(&ecb_des_alg);
ecb_des_err:
	crypto_unregister_alg(&des_alg);
des_err:
	goto out;
}

static void __exit des_s390_exit(void)
{
	if (ctrblk) {
		crypto_unregister_alg(&ctr_des_alg);
		crypto_unregister_alg(&ctr_des3_alg);
		free_page((unsigned long) ctrblk);
	}
	crypto_unregister_alg(&cbc_des3_alg);
	crypto_unregister_alg(&ecb_des3_alg);
	crypto_unregister_alg(&des3_alg);
	crypto_unregister_alg(&cbc_des_alg);
	crypto_unregister_alg(&ecb_des_alg);
	crypto_unregister_alg(&des_alg);
}

module_init(des_s390_init);
module_exit(des_s390_exit);

MODULE_ALIAS("des");
MODULE_ALIAS("des3_ede");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms");
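
/*
 * Usage sketch (illustrative only, not part of this file): kernel code
 * of this era reaches the driver through the generic crypto API by
 * algorithm name; the s390 implementation is preferred over the
 * software fallback via its higher cra_priority.  Error handling is
 * trimmed and key/iv/buf are assumed to be caller-provided:
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(des3_ede)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	crypto_blkcipher_setkey(tfm, key, DES3_KEY_SIZE);
 *	crypto_blkcipher_set_iv(tfm, iv, DES_BLOCK_SIZE);
 *	sg_init_one(&sg, buf, buf_len);		(buf_len: multiple of 8)
 *	crypto_blkcipher_encrypt(&desc, &sg, &sg, buf_len);
 *	crypto_free_blkcipher(tfm);
 */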