1 // SPDX-License-Identifier: GPL-2.0-or-later 2 /* 3 * Scatterlist Cryptographic API. 4 * 5 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 6 * Copyright (c) 2002 David S. Miller (davem@redhat.com) 7 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au> 8 * 9 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no> 10 * and Nettle, by Niels Möller. 11 */ 12 13 #include <linux/err.h> 14 #include <linux/errno.h> 15 #include <linux/jump_label.h> 16 #include <linux/kernel.h> 17 #include <linux/kmod.h> 18 #include <linux/module.h> 19 #include <linux/param.h> 20 #include <linux/sched/signal.h> 21 #include <linux/slab.h> 22 #include <linux/string.h> 23 #include <linux/completion.h> 24 #include "internal.h" 25 26 LIST_HEAD(crypto_alg_list); 27 EXPORT_SYMBOL_GPL(crypto_alg_list); 28 DECLARE_RWSEM(crypto_alg_sem); 29 EXPORT_SYMBOL_GPL(crypto_alg_sem); 30 31 BLOCKING_NOTIFIER_HEAD(crypto_chain); 32 EXPORT_SYMBOL_GPL(crypto_chain); 33 34 #ifndef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS 35 DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished); 36 EXPORT_SYMBOL_GPL(__crypto_boot_test_finished); 37 #endif 38 39 static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg); 40 41 struct crypto_alg *crypto_mod_get(struct crypto_alg *alg) 42 { 43 return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL; 44 } 45 EXPORT_SYMBOL_GPL(crypto_mod_get); 46 47 void crypto_mod_put(struct crypto_alg *alg) 48 { 49 struct module *module = alg->cra_module; 50 51 crypto_alg_put(alg); 52 module_put(module); 53 } 54 EXPORT_SYMBOL_GPL(crypto_mod_put); 55 56 static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, 57 u32 mask) 58 { 59 struct crypto_alg *q, *alg = NULL; 60 int best = -2; 61 62 list_for_each_entry(q, &crypto_alg_list, cra_list) { 63 int exact, fuzzy; 64 65 if (crypto_is_moribund(q)) 66 continue; 67 68 if ((q->cra_flags ^ type) & mask) 69 continue; 70 71 if (crypto_is_larval(q) && 72 !crypto_is_test_larval((struct crypto_larval *)q) && 73 ((struct crypto_larval *)q)->mask != mask) 74 continue; 75 76 exact = !strcmp(q->cra_driver_name, name); 77 fuzzy = !strcmp(q->cra_name, name); 78 if (!exact && !(fuzzy && q->cra_priority > best)) 79 continue; 80 81 if (unlikely(!crypto_mod_get(q))) 82 continue; 83 84 best = q->cra_priority; 85 if (alg) 86 crypto_mod_put(alg); 87 alg = q; 88 89 if (exact) 90 break; 91 } 92 93 return alg; 94 } 95 96 static void crypto_larval_destroy(struct crypto_alg *alg) 97 { 98 struct crypto_larval *larval = (void *)alg; 99 100 BUG_ON(!crypto_is_larval(alg)); 101 if (!IS_ERR_OR_NULL(larval->adult)) 102 crypto_mod_put(larval->adult); 103 kfree(larval); 104 } 105 106 struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask) 107 { 108 struct crypto_larval *larval; 109 110 larval = kzalloc(sizeof(*larval), GFP_KERNEL); 111 if (!larval) 112 return ERR_PTR(-ENOMEM); 113 114 larval->mask = mask; 115 larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type; 116 larval->alg.cra_priority = -1; 117 larval->alg.cra_destroy = crypto_larval_destroy; 118 119 strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME); 120 init_completion(&larval->completion); 121 122 return larval; 123 } 124 EXPORT_SYMBOL_GPL(crypto_larval_alloc); 125 126 static struct crypto_alg *crypto_larval_add(const char *name, u32 type, 127 u32 mask) 128 { 129 struct crypto_alg *alg; 130 struct crypto_larval *larval; 131 132 larval = crypto_larval_alloc(name, type, mask); 133 if (IS_ERR(larval)) 134 return ERR_CAST(larval); 135 136 refcount_set(&larval->alg.cra_refcnt, 2); 137 138 down_write(&crypto_alg_sem); 139 alg = __crypto_alg_lookup(name, type, mask); 140 if (!alg) { 141 alg = &larval->alg; 142 list_add(&alg->cra_list, &crypto_alg_list); 143 } 144 up_write(&crypto_alg_sem); 145 146 if (alg != &larval->alg) { 147 kfree(larval); 148 if (crypto_is_larval(alg)) 149 alg = crypto_larval_wait(alg); 150 } 151 152 return alg; 153 } 154 155 void crypto_larval_kill(struct crypto_alg *alg) 156 { 157 struct crypto_larval *larval = (void *)alg; 158 159 down_write(&crypto_alg_sem); 160 list_del(&alg->cra_list); 161 up_write(&crypto_alg_sem); 162 complete_all(&larval->completion); 163 crypto_alg_put(alg); 164 } 165 EXPORT_SYMBOL_GPL(crypto_larval_kill); 166 167 void crypto_wait_for_test(struct crypto_larval *larval) 168 { 169 int err; 170 171 err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult); 172 if (WARN_ON_ONCE(err != NOTIFY_STOP)) 173 goto out; 174 175 err = wait_for_completion_killable(&larval->completion); 176 WARN_ON(err); 177 out: 178 crypto_larval_kill(&larval->alg); 179 } 180 EXPORT_SYMBOL_GPL(crypto_wait_for_test); 181 182 static void crypto_start_test(struct crypto_larval *larval) 183 { 184 if (!crypto_is_test_larval(larval)) 185 return; 186 187 if (larval->test_started) 188 return; 189 190 down_write(&crypto_alg_sem); 191 if (larval->test_started) { 192 up_write(&crypto_alg_sem); 193 return; 194 } 195 196 larval->test_started = true; 197 up_write(&crypto_alg_sem); 198 199 crypto_wait_for_test(larval); 200 } 201 202 static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg) 203 { 204 struct crypto_larval *larval = (void *)alg; 205 long timeout; 206 207 if (!crypto_boot_test_finished()) 208 crypto_start_test(larval); 209 210 timeout = wait_for_completion_killable_timeout( 211 &larval->completion, 60 * HZ); 212 213 alg = larval->adult; 214 if (timeout < 0) 215 alg = ERR_PTR(-EINTR); 216 else if (!timeout) 217 alg = ERR_PTR(-ETIMEDOUT); 218 else if (!alg) 219 alg = ERR_PTR(-ENOENT); 220 else if (IS_ERR(alg)) 221 ; 222 else if (crypto_is_test_larval(larval) && 223 !(alg->cra_flags & CRYPTO_ALG_TESTED)) 224 alg = ERR_PTR(-EAGAIN); 225 else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL) 226 alg = ERR_PTR(-EAGAIN); 227 else if (!crypto_mod_get(alg)) 228 alg = ERR_PTR(-EAGAIN); 229 crypto_mod_put(&larval->alg); 230 231 return alg; 232 } 233 234 static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, 235 u32 mask) 236 { 237 const u32 fips = CRYPTO_ALG_FIPS_INTERNAL; 238 struct crypto_alg *alg; 239 u32 test = 0; 240 241 if (!((type | mask) & CRYPTO_ALG_TESTED)) 242 test |= CRYPTO_ALG_TESTED; 243 244 down_read(&crypto_alg_sem); 245 alg = __crypto_alg_lookup(name, (type | test) & ~fips, 246 (mask | test) & ~fips); 247 if (alg) { 248 if (((type | mask) ^ fips) & fips) 249 mask |= fips; 250 mask &= fips; 251 252 if (!crypto_is_larval(alg) && 253 ((type ^ alg->cra_flags) & mask)) { 254 /* Algorithm is disallowed in FIPS mode. */ 255 crypto_mod_put(alg); 256 alg = ERR_PTR(-ENOENT); 257 } 258 } else if (test) { 259 alg = __crypto_alg_lookup(name, type, mask); 260 if (alg && !crypto_is_larval(alg)) { 261 /* Test failed */ 262 crypto_mod_put(alg); 263 alg = ERR_PTR(-ELIBBAD); 264 } 265 } 266 up_read(&crypto_alg_sem); 267 268 return alg; 269 } 270 271 static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, 272 u32 mask) 273 { 274 struct crypto_alg *alg; 275 276 if (!name) 277 return ERR_PTR(-ENOENT); 278 279 type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD); 280 mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD); 281 282 alg = crypto_alg_lookup(name, type, mask); 283 if (!alg && !(mask & CRYPTO_NOLOAD)) { 284 request_module("crypto-%s", name); 285 286 if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask & 287 CRYPTO_ALG_NEED_FALLBACK)) 288 request_module("crypto-%s-all", name); 289 290 alg = crypto_alg_lookup(name, type, mask); 291 } 292 293 if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg)) 294 alg = crypto_larval_wait(alg); 295 else if (!alg) 296 alg = crypto_larval_add(name, type, mask); 297 298 return alg; 299 } 300 301 int crypto_probing_notify(unsigned long val, void *v) 302 { 303 int ok; 304 305 ok = blocking_notifier_call_chain(&crypto_chain, val, v); 306 if (ok == NOTIFY_DONE) { 307 request_module("cryptomgr"); 308 ok = blocking_notifier_call_chain(&crypto_chain, val, v); 309 } 310 311 return ok; 312 } 313 EXPORT_SYMBOL_GPL(crypto_probing_notify); 314 315 struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask) 316 { 317 struct crypto_alg *alg; 318 struct crypto_alg *larval; 319 int ok; 320 321 /* 322 * If the internal flag is set for a cipher, require a caller to 323 * invoke the cipher with the internal flag to use that cipher. 324 * Also, if a caller wants to allocate a cipher that may or may 325 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and 326 * !(mask & CRYPTO_ALG_INTERNAL). 327 */ 328 if (!((type | mask) & CRYPTO_ALG_INTERNAL)) 329 mask |= CRYPTO_ALG_INTERNAL; 330 331 larval = crypto_larval_lookup(name, type, mask); 332 if (IS_ERR(larval) || !crypto_is_larval(larval)) 333 return larval; 334 335 ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval); 336 337 if (ok == NOTIFY_STOP) 338 alg = crypto_larval_wait(larval); 339 else { 340 crypto_mod_put(larval); 341 alg = ERR_PTR(-ENOENT); 342 } 343 crypto_larval_kill(larval); 344 return alg; 345 } 346 EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup); 347 348 static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask) 349 { 350 const struct crypto_type *type_obj = tfm->__crt_alg->cra_type; 351 352 if (type_obj) 353 return type_obj->init(tfm, type, mask); 354 return 0; 355 } 356 357 static void crypto_exit_ops(struct crypto_tfm *tfm) 358 { 359 const struct crypto_type *type = tfm->__crt_alg->cra_type; 360 361 if (type && tfm->exit) 362 tfm->exit(tfm); 363 } 364 365 static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask) 366 { 367 const struct crypto_type *type_obj = alg->cra_type; 368 unsigned int len; 369 370 len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1); 371 if (type_obj) 372 return len + type_obj->ctxsize(alg, type, mask); 373 374 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) { 375 default: 376 BUG(); 377 378 case CRYPTO_ALG_TYPE_CIPHER: 379 len += crypto_cipher_ctxsize(alg); 380 break; 381 382 case CRYPTO_ALG_TYPE_COMPRESS: 383 len += crypto_compress_ctxsize(alg); 384 break; 385 } 386 387 return len; 388 } 389 390 void crypto_shoot_alg(struct crypto_alg *alg) 391 { 392 down_write(&crypto_alg_sem); 393 alg->cra_flags |= CRYPTO_ALG_DYING; 394 up_write(&crypto_alg_sem); 395 } 396 EXPORT_SYMBOL_GPL(crypto_shoot_alg); 397 398 struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type, 399 u32 mask) 400 { 401 struct crypto_tfm *tfm = NULL; 402 unsigned int tfm_size; 403 int err = -ENOMEM; 404 405 tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask); 406 tfm = kzalloc(tfm_size, GFP_KERNEL); 407 if (tfm == NULL) 408 goto out_err; 409 410 tfm->__crt_alg = alg; 411 refcount_set(&tfm->refcnt, 1); 412 413 err = crypto_init_ops(tfm, type, mask); 414 if (err) 415 goto out_free_tfm; 416 417 if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm))) 418 goto cra_init_failed; 419 420 goto out; 421 422 cra_init_failed: 423 crypto_exit_ops(tfm); 424 out_free_tfm: 425 if (err == -EAGAIN) 426 crypto_shoot_alg(alg); 427 kfree(tfm); 428 out_err: 429 tfm = ERR_PTR(err); 430 out: 431 return tfm; 432 } 433 EXPORT_SYMBOL_GPL(__crypto_alloc_tfm); 434 435 /* 436 * crypto_alloc_base - Locate algorithm and allocate transform 437 * @alg_name: Name of algorithm 438 * @type: Type of algorithm 439 * @mask: Mask for type comparison 440 * 441 * This function should not be used by new algorithm types. 442 * Please use crypto_alloc_tfm instead. 443 * 444 * crypto_alloc_base() will first attempt to locate an already loaded 445 * algorithm. If that fails and the kernel supports dynamically loadable 446 * modules, it will then attempt to load a module of the same name or 447 * alias. If that fails it will send a query to any loaded crypto manager 448 * to construct an algorithm on the fly. A refcount is grabbed on the 449 * algorithm which is then associated with the new transform. 450 * 451 * The returned transform is of a non-determinate type. Most people 452 * should use one of the more specific allocation functions such as 453 * crypto_alloc_skcipher(). 454 * 455 * In case of error the return value is an error pointer. 456 */ 457 struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask) 458 { 459 struct crypto_tfm *tfm; 460 int err; 461 462 for (;;) { 463 struct crypto_alg *alg; 464 465 alg = crypto_alg_mod_lookup(alg_name, type, mask); 466 if (IS_ERR(alg)) { 467 err = PTR_ERR(alg); 468 goto err; 469 } 470 471 tfm = __crypto_alloc_tfm(alg, type, mask); 472 if (!IS_ERR(tfm)) 473 return tfm; 474 475 crypto_mod_put(alg); 476 err = PTR_ERR(tfm); 477 478 err: 479 if (err != -EAGAIN) 480 break; 481 if (fatal_signal_pending(current)) { 482 err = -EINTR; 483 break; 484 } 485 } 486 487 return ERR_PTR(err); 488 } 489 EXPORT_SYMBOL_GPL(crypto_alloc_base); 490 491 static void *crypto_alloc_tfmmem(struct crypto_alg *alg, 492 const struct crypto_type *frontend, int node, 493 gfp_t gfp) 494 { 495 struct crypto_tfm *tfm; 496 unsigned int tfmsize; 497 unsigned int total; 498 char *mem; 499 500 tfmsize = frontend->tfmsize; 501 total = tfmsize + sizeof(*tfm) + frontend->extsize(alg); 502 503 mem = kzalloc_node(total, gfp, node); 504 if (mem == NULL) 505 return ERR_PTR(-ENOMEM); 506 507 tfm = (struct crypto_tfm *)(mem + tfmsize); 508 tfm->__crt_alg = alg; 509 tfm->node = node; 510 refcount_set(&tfm->refcnt, 1); 511 512 return mem; 513 } 514 515 void *crypto_create_tfm_node(struct crypto_alg *alg, 516 const struct crypto_type *frontend, 517 int node) 518 { 519 struct crypto_tfm *tfm; 520 char *mem; 521 int err; 522 523 mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL); 524 if (IS_ERR(mem)) 525 goto out; 526 527 tfm = (struct crypto_tfm *)(mem + frontend->tfmsize); 528 529 err = frontend->init_tfm(tfm); 530 if (err) 531 goto out_free_tfm; 532 533 if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm))) 534 goto cra_init_failed; 535 536 goto out; 537 538 cra_init_failed: 539 crypto_exit_ops(tfm); 540 out_free_tfm: 541 if (err == -EAGAIN) 542 crypto_shoot_alg(alg); 543 kfree(mem); 544 mem = ERR_PTR(err); 545 out: 546 return mem; 547 } 548 EXPORT_SYMBOL_GPL(crypto_create_tfm_node); 549 550 void *crypto_clone_tfm(const struct crypto_type *frontend, 551 struct crypto_tfm *otfm) 552 { 553 struct crypto_alg *alg = otfm->__crt_alg; 554 struct crypto_tfm *tfm; 555 char *mem; 556 557 mem = ERR_PTR(-ESTALE); 558 if (unlikely(!crypto_mod_get(alg))) 559 goto out; 560 561 mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC); 562 if (IS_ERR(mem)) { 563 crypto_mod_put(alg); 564 goto out; 565 } 566 567 tfm = (struct crypto_tfm *)(mem + frontend->tfmsize); 568 tfm->crt_flags = otfm->crt_flags; 569 tfm->exit = otfm->exit; 570 571 out: 572 return mem; 573 } 574 EXPORT_SYMBOL_GPL(crypto_clone_tfm); 575 576 struct crypto_alg *crypto_find_alg(const char *alg_name, 577 const struct crypto_type *frontend, 578 u32 type, u32 mask) 579 { 580 if (frontend) { 581 type &= frontend->maskclear; 582 mask &= frontend->maskclear; 583 type |= frontend->type; 584 mask |= frontend->maskset; 585 } 586 587 return crypto_alg_mod_lookup(alg_name, type, mask); 588 } 589 EXPORT_SYMBOL_GPL(crypto_find_alg); 590 591 /* 592 * crypto_alloc_tfm_node - Locate algorithm and allocate transform 593 * @alg_name: Name of algorithm 594 * @frontend: Frontend algorithm type 595 * @type: Type of algorithm 596 * @mask: Mask for type comparison 597 * @node: NUMA node in which users desire to put requests, if node is 598 * NUMA_NO_NODE, it means users have no special requirement. 599 * 600 * crypto_alloc_tfm() will first attempt to locate an already loaded 601 * algorithm. If that fails and the kernel supports dynamically loadable 602 * modules, it will then attempt to load a module of the same name or 603 * alias. If that fails it will send a query to any loaded crypto manager 604 * to construct an algorithm on the fly. A refcount is grabbed on the 605 * algorithm which is then associated with the new transform. 606 * 607 * The returned transform is of a non-determinate type. Most people 608 * should use one of the more specific allocation functions such as 609 * crypto_alloc_skcipher(). 610 * 611 * In case of error the return value is an error pointer. 612 */ 613 614 void *crypto_alloc_tfm_node(const char *alg_name, 615 const struct crypto_type *frontend, u32 type, u32 mask, 616 int node) 617 { 618 void *tfm; 619 int err; 620 621 for (;;) { 622 struct crypto_alg *alg; 623 624 alg = crypto_find_alg(alg_name, frontend, type, mask); 625 if (IS_ERR(alg)) { 626 err = PTR_ERR(alg); 627 goto err; 628 } 629 630 tfm = crypto_create_tfm_node(alg, frontend, node); 631 if (!IS_ERR(tfm)) 632 return tfm; 633 634 crypto_mod_put(alg); 635 err = PTR_ERR(tfm); 636 637 err: 638 if (err != -EAGAIN) 639 break; 640 if (fatal_signal_pending(current)) { 641 err = -EINTR; 642 break; 643 } 644 } 645 646 return ERR_PTR(err); 647 } 648 EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node); 649 650 /* 651 * crypto_destroy_tfm - Free crypto transform 652 * @mem: Start of tfm slab 653 * @tfm: Transform to free 654 * 655 * This function frees up the transform and any associated resources, 656 * then drops the refcount on the associated algorithm. 657 */ 658 void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm) 659 { 660 struct crypto_alg *alg; 661 662 if (IS_ERR_OR_NULL(mem)) 663 return; 664 665 if (!refcount_dec_and_test(&tfm->refcnt)) 666 return; 667 alg = tfm->__crt_alg; 668 669 if (!tfm->exit && alg->cra_exit) 670 alg->cra_exit(tfm); 671 crypto_exit_ops(tfm); 672 crypto_mod_put(alg); 673 kfree_sensitive(mem); 674 } 675 EXPORT_SYMBOL_GPL(crypto_destroy_tfm); 676 677 int crypto_has_alg(const char *name, u32 type, u32 mask) 678 { 679 int ret = 0; 680 struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask); 681 682 if (!IS_ERR(alg)) { 683 crypto_mod_put(alg); 684 ret = 1; 685 } 686 687 return ret; 688 } 689 EXPORT_SYMBOL_GPL(crypto_has_alg); 690 691 void crypto_req_done(void *data, int err) 692 { 693 struct crypto_wait *wait = data; 694 695 if (err == -EINPROGRESS) 696 return; 697 698 wait->err = err; 699 complete(&wait->completion); 700 } 701 EXPORT_SYMBOL_GPL(crypto_req_done); 702 703 MODULE_DESCRIPTION("Cryptographic core API"); 704 MODULE_LICENSE("GPL"); 705