// SPDX-License-Identifier: GPL-2.0+
/*
 * Cryptographic API.
 *
 * s390 implementation of the DES Cipher Algorithm.
 *
 * Copyright IBM Corp. 2003, 2011
 * Author(s): Thomas Spatzier
 *	      Jan Glauber (jan.glauber@de.ibm.com)
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/fips.h>
#include <crypto/algapi.h>
#include <crypto/des.h>
#include <asm/cpacf.h>

#define DES3_KEY_SIZE	(3 * DES_KEY_SIZE)

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions;

struct s390_des_ctx {
	u8 iv[DES_BLOCK_SIZE];
	u8 key[DES3_KEY_SIZE];
};

static int des_setkey(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];

	/* check for weak keys */
	if (!des_ekey(tmp, key) &&
	    (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	memcpy(ctx->key, key, key_len);
	return 0;
}

static void des_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	cpacf_km(CPACF_KM_DEA, ctx->key, out, in, DES_BLOCK_SIZE);
}

static void des_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	cpacf_km(CPACF_KM_DEA | CPACF_DECRYPT,
		 ctx->key, out, in, DES_BLOCK_SIZE);
}

static struct crypto_alg des_alg = {
	.cra_name		= "des",
	.cra_driver_name	= "des-s390",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_des_ctx),
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= DES_KEY_SIZE,
			.cia_max_keysize	= DES_KEY_SIZE,
			.cia_setkey		= des_setkey,
			.cia_encrypt		= des_encrypt,
			.cia_decrypt		= des_decrypt,
		}
	}
};

/* ECB helper: feed complete blocks to the CPACF KM instruction. */
static int ecb_desall_crypt(struct blkcipher_desc *desc, unsigned long fc,
			    struct blkcipher_walk *walk)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n;
	int ret;

	ret = blkcipher_walk_virt(desc, walk);
	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(DES_BLOCK_SIZE - 1);
		cpacf_km(fc, ctx->key, walk->dst.virt.addr,
			 walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	return ret;
}

/* CBC helper: KMC takes iv and key together in one parameter block. */
static int cbc_desall_crypt(struct blkcipher_desc *desc, unsigned long fc,
			    struct blkcipher_walk *walk)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int nbytes, n;
	int ret;
	struct {
		u8 iv[DES_BLOCK_SIZE];
		u8 key[DES3_KEY_SIZE];
	} param;

	ret = blkcipher_walk_virt(desc, walk);
	memcpy(param.iv, walk->iv, DES_BLOCK_SIZE);
	memcpy(param.key, ctx->key, DES3_KEY_SIZE);
	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
		/* only use complete blocks */
		n = nbytes & ~(DES_BLOCK_SIZE - 1);
		cpacf_kmc(fc, &param, walk->dst.virt.addr,
			  walk->src.virt.addr, n);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	memcpy(walk->iv, param.iv, DES_BLOCK_SIZE);
	return ret;
}

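/*
 * Per-mode blkcipher entry points: each wrapper initializes a
 * scatterlist walk and dispatches to the common ECB/CBC helper with
 * the matching CPACF function code.
 */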
static int ecb_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, CPACF_KM_DEA, &walk);
}

static int ecb_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, CPACF_KM_DEA | CPACF_DECRYPT, &walk);
}

static struct crypto_alg ecb_des_alg = {
	.cra_name		= "ecb(des)",
	.cra_driver_name	= "ecb-des-s390",
	.cra_priority		= 400,	/* combo: des + ecb */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_des_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= des_setkey,
			.encrypt	= ecb_des_encrypt,
			.decrypt	= ecb_des_decrypt,
		}
	}
};

static int cbc_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, CPACF_KMC_DEA, &walk);
}

static int cbc_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, CPACF_KMC_DEA | CPACF_DECRYPT, &walk);
}

static struct crypto_alg cbc_des_alg = {
	.cra_name		= "cbc(des)",
	.cra_driver_name	= "cbc-des-s390",
	.cra_priority		= 400,	/* combo: des + cbc */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_des_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= des_setkey,
			.encrypt	= cbc_des_encrypt,
			.decrypt	= cbc_des_decrypt,
		}
	}
};

/*
 * RFC2451:
 *
 *   For DES-EDE3, there is no known need to reject weak or
 *   complementation keys. Any weakness is obviated by the use of
 *   multiple keys.
 *
 *   However, if the first two or last two independent 64-bit keys are
 *   equal (k1 == k2 or k2 == k3), then the DES3 operation is simply the
 *   same as DES. Implementers MUST reject keys that exhibit this
 *   property.
 *
 *   In fips mode additionally check that all 3 keys are unique.
 *
 */
static int des3_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int key_len)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!(crypto_memneq(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
	      crypto_memneq(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
			    DES_KEY_SIZE)) &&
	    (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	/* in fips mode, ensure k1 != k2 and k2 != k3 and k1 != k3 */
	if (fips_enabled &&
	    !(crypto_memneq(key, &key[DES_KEY_SIZE], DES_KEY_SIZE) &&
	      crypto_memneq(&key[DES_KEY_SIZE], &key[DES_KEY_SIZE * 2],
			    DES_KEY_SIZE) &&
	      crypto_memneq(key, &key[DES_KEY_SIZE * 2], DES_KEY_SIZE))) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	memcpy(ctx->key, key, key_len);
	return 0;
}

static void des3_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	cpacf_km(CPACF_KM_TDEA_192, ctx->key, dst, src, DES_BLOCK_SIZE);
}

static void des3_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct s390_des_ctx *ctx = crypto_tfm_ctx(tfm);

	cpacf_km(CPACF_KM_TDEA_192 | CPACF_DECRYPT,
		 ctx->key, dst, src, DES_BLOCK_SIZE);
}

static struct crypto_alg des3_alg = {
	.cra_name		= "des3_ede",
	.cra_driver_name	= "des3_ede-s390",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_des_ctx),
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= DES3_KEY_SIZE,
			.cia_max_keysize	= DES3_KEY_SIZE,
			.cia_setkey		= des3_setkey,
			.cia_encrypt		= des3_encrypt,
			.cia_decrypt		= des3_decrypt,
		}
	}
};

static int ecb_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, CPACF_KM_TDEA_192, &walk);
}

static int ecb_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_desall_crypt(desc, CPACF_KM_TDEA_192 | CPACF_DECRYPT,
				&walk);
}

static struct crypto_alg ecb_des3_alg = {
	.cra_name		= "ecb(des3_ede)",
	.cra_driver_name	= "ecb-des3_ede-s390",
	.cra_priority		= 400,	/* combo: des3 + ecb */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_des_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= DES3_KEY_SIZE,
			.max_keysize	= DES3_KEY_SIZE,
			.setkey		= des3_setkey,
			.encrypt	= ecb_des3_encrypt,
			.decrypt	= ecb_des3_decrypt,
		}
	}
};

static int cbc_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, CPACF_KMC_TDEA_192, &walk);
}

static int cbc_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_desall_crypt(desc, CPACF_KMC_TDEA_192 | CPACF_DECRYPT,
				&walk);
}

static struct crypto_alg cbc_des3_alg = {
	.cra_name		= "cbc(des3_ede)",
	.cra_driver_name	= "cbc-des3_ede-s390",
	.cra_priority		= 400,	/* combo: des3 + cbc */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_des_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= DES3_KEY_SIZE,
			.max_keysize	= DES3_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= des3_setkey,
			.encrypt	= cbc_des3_encrypt,
			.decrypt	= cbc_des3_decrypt,
		}
	}
};

/* Fill ctrptr with consecutive counter blocks starting at iv, return bytes prepared. */
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* align to block size, max. PAGE_SIZE */
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(DES_BLOCK_SIZE - 1);
	memcpy(ctrptr, iv, DES_BLOCK_SIZE);
	for (i = (n / DES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + DES_BLOCK_SIZE, ctrptr, DES_BLOCK_SIZE);
		crypto_inc(ctrptr + DES_BLOCK_SIZE, DES_BLOCK_SIZE);
		ctrptr += DES_BLOCK_SIZE;
	}
	return n;
}

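/*
 * CTR helper: if the shared ctrblk page is uncontended, pre-compute up
 * to a page of counter blocks so one KMCTR call can process several
 * blocks at once; otherwise fall back to one block per call using
 * walk->iv as the counter.
 */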
static int ctr_desall_crypt(struct blkcipher_desc *desc, unsigned long fc,
			    struct blkcipher_walk *walk)
{
	struct s390_des_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 buf[DES_BLOCK_SIZE], *ctrptr;
	unsigned int n, nbytes;
	int ret, locked;

	locked = spin_trylock(&ctrblk_lock);

	ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
		n = DES_BLOCK_SIZE;
		if (nbytes >= 2*DES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
		ctrptr = (n > DES_BLOCK_SIZE) ? ctrblk : walk->iv;
		cpacf_kmctr(fc, ctx->key, walk->dst.virt.addr,
			    walk->src.virt.addr, n, ctrptr);
		if (ctrptr == ctrblk)
			memcpy(walk->iv, ctrptr + n - DES_BLOCK_SIZE,
			       DES_BLOCK_SIZE);
		crypto_inc(walk->iv, DES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, nbytes - n);
	}
	if (locked)
		spin_unlock(&ctrblk_lock);
	/* final block may be < DES_BLOCK_SIZE, copy only nbytes */
	if (nbytes) {
		cpacf_kmctr(fc, ctx->key, buf, walk->src.virt.addr,
			    DES_BLOCK_SIZE, walk->iv);
		memcpy(walk->dst.virt.addr, buf, nbytes);
		crypto_inc(walk->iv, DES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
	}
	return ret;
}

static int ctr_des_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, CPACF_KMCTR_DEA, &walk);
}

static int ctr_des_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, CPACF_KMCTR_DEA | CPACF_DECRYPT, &walk);
}

static struct crypto_alg ctr_des_alg = {
	.cra_name		= "ctr(des)",
	.cra_driver_name	= "ctr-des-s390",
	.cra_priority		= 400,	/* combo: des + ctr */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_des_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= des_setkey,
			.encrypt	= ctr_des_encrypt,
			.decrypt	= ctr_des_decrypt,
		}
	}
};

static int ctr_des3_encrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, CPACF_KMCTR_TDEA_192, &walk);
}

static int ctr_des3_decrypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_desall_crypt(desc, CPACF_KMCTR_TDEA_192 | CPACF_DECRYPT,
				&walk);
}

static struct crypto_alg ctr_des3_alg = {
	.cra_name		= "ctr(des3_ede)",
	.cra_driver_name	= "ctr-des3_ede-s390",
	.cra_priority		= 400,	/* combo: des3 + ctr */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_des_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= DES3_KEY_SIZE,
			.max_keysize	= DES3_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= des3_setkey,
			.encrypt	= ctr_des3_encrypt,
			.decrypt	= ctr_des3_decrypt,
		}
	}
};

static struct crypto_alg *des_s390_algs_ptr[8];
static int des_s390_algs_num;

static int des_s390_register_alg(struct crypto_alg *alg)
{
	int ret;

	ret = crypto_register_alg(alg);
	if (!ret)
		des_s390_algs_ptr[des_s390_algs_num++] = alg;
	return ret;
}

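/* Unregister all registered algorithms and free the counter page. */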
static void des_s390_exit(void)
{
	while (des_s390_algs_num--)
		crypto_unregister_alg(des_s390_algs_ptr[des_s390_algs_num]);
	if (ctrblk)
		free_page((unsigned long) ctrblk);
}

static int __init des_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC and KMCTR */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_DEA)) {
		ret = des_s390_register_alg(&des_alg);
		if (ret)
			goto out_err;
		ret = des_s390_register_alg(&ecb_des_alg);
		if (ret)
			goto out_err;
	}
	if (cpacf_test_func(&kmc_functions, CPACF_KMC_DEA)) {
		ret = des_s390_register_alg(&cbc_des_alg);
		if (ret)
			goto out_err;
	}
	if (cpacf_test_func(&km_functions, CPACF_KM_TDEA_192)) {
		ret = des_s390_register_alg(&des3_alg);
		if (ret)
			goto out_err;
		ret = des_s390_register_alg(&ecb_des3_alg);
		if (ret)
			goto out_err;
	}
	if (cpacf_test_func(&kmc_functions, CPACF_KMC_TDEA_192)) {
		ret = des_s390_register_alg(&cbc_des3_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_DEA) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_TDEA_192)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_DEA)) {
		ret = des_s390_register_alg(&ctr_des_alg);
		if (ret)
			goto out_err;
	}
	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_TDEA_192)) {
		ret = des_s390_register_alg(&ctr_des3_alg);
		if (ret)
			goto out_err;
	}

	return 0;
out_err:
	des_s390_exit();
	return ret;
}

module_cpu_feature_match(MSA, des_s390_init);
module_exit(des_s390_exit);

MODULE_ALIAS_CRYPTO("des");
MODULE_ALIAS_CRYPTO("des3_ede");

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("DES & Triple DES EDE Cipher Algorithms");