// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}
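
/*
 * Editor's illustrative note (not part of the original source): the test
 * "cra_alignmask & (cra_alignmask + 1)" above only accepts masks of the
 * form 2^n - 1, i.e. one less than a power of two. For example:
 *
 *	.cra_alignmask = 0	accepted (no alignment requirement)
 *	.cra_alignmask = 3	accepted (4-byte alignment: 3 & 4 == 0)
 *	.cra_alignmask = 7	accepted (8-byte alignment: 7 & 8 == 0)
 *	.cra_alignmask = 4	rejected (4 & 5 != 0)
 */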

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns. If nalg is not null, then exempt any algorithms
 * that are depended on by nalg. This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree. The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered.
			 * This is because it may have failed during
			 * registration. Therefore we still need to
			 * make the following test.
			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered. An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration. But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead. Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER))
		return NULL;

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	if (larval)
		list_add(&larval->alg.cra_list, &crypto_alg_list);
	else
		alg->cra_flags |= CRYPTO_ALG_TESTED;

	crypto_stats_init(alg);

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	bool test_started;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	test_started = static_key_enabled(&crypto_boot_test_finished);
	if (!IS_ERR_OR_NULL(larval))
		larval->test_started = test_started;
	up_write(&crypto_alg_sem);

	if (IS_ERR_OR_NULL(larval))
		return PTR_ERR(larval);

	if (test_started)
		crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);
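
/*
 * Editor's illustrative example (not part of the original source): a typical
 * driver declares its algorithms statically and registers them from
 * module_init() via crypto_register_algs(), unregistering them again on
 * module_exit(). All names and field values below are hypothetical, and the
 * type-specific cra_* fields are omitted.
 *
 *	static struct crypto_alg example_algs[] = { {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct example_ctx),
 *		.cra_module		= THIS_MODULE,
 *	} };
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_algs(example_algs,
 *					    ARRAY_SIZE(example_algs));
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_algs(example_algs, ARRAY_SIZE(example_algs));
 *	}
 */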

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval)
		larval->test_started = true;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR_OR_NULL(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
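
/*
 * Editor's illustrative example (not part of the original source): a
 * template's ->create() callback typically checks the requested type,
 * grabs a spawn for the inner algorithm, derives the instance names from
 * it and finally calls crypto_register_instance(). The names below are
 * hypothetical, the instance layout is simplified and error unwinding is
 * abbreviated; real templates normally use the type-specific wrappers
 * around crypto_grab_spawn().
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct crypto_instance *inst;
 *		struct crypto_spawn *spawn;
 *		u32 mask;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_CIPHER, &mask);
 *		if (err)
 *			return err;
 *
 *		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *		if (!inst)
 *			return -ENOMEM;
 *		spawn = (void *)(inst + 1);
 *
 *		err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
 *					CRYPTO_ALG_TYPE_CIPHER,
 *					CRYPTO_ALG_TYPE_MASK | mask);
 *		if (err)
 *			goto err_free;
 *
 *		err = crypto_inst_setname(inst, tmpl->name, spawn->alg);
 *		if (err)
 *			goto err_drop;
 *
 *		(the remaining cra_* fields of the instance would be set here)
 *
 *		err = crypto_register_instance(tmpl, inst);
 *		if (err)
 *			goto err_drop;
 *		return 0;
 *
 *	err_drop:
 *		crypto_drop_spawn(spawn);
 *	err_free:
 *		kfree(inst);
 *		return err;
 *	}
 */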

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
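
/*
 * Editor's illustrative example (not part of the original source): an
 * instance's ->cra_init() typically turns its spawn into a child transform
 * with crypto_spawn_tfm() (or crypto_spawn_tfm2() for frontend-based types)
 * and stores it in the tfm context. The names below are hypothetical.
 *
 *	static int example_init_tfm(struct crypto_tfm *tfm)
 *	{
 *		struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
 *		struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *		struct example_ctx *ctx = crypto_tfm_ctx(tfm);
 *		struct crypto_tfm *child;
 *
 *		child = crypto_spawn_tfm(spawn, CRYPTO_ALG_TYPE_CIPHER,
 *					 CRYPTO_ALG_TYPE_MASK);
 *		if (IS_ERR(child))
 *			return PTR_ERR(child);
 *
 *		ctx->child = child;
 *		return 0;
 *	}
 */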

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as. E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
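
/*
 * Editor's illustrative example (not part of the original source): hardware
 * drivers commonly feed requests through a crypto_queue, noting any
 * backlogged request so its completion can be signalled with -EINPROGRESS
 * once it is finally accepted. Locking and the device specifics are omitted;
 * all names below are hypothetical.
 *
 *	static struct crypto_queue example_queue;
 *
 *	static void example_setup(void)
 *	{
 *		crypto_init_queue(&example_queue, 32);
 *	}
 *
 *	static int example_queue_req(struct crypto_async_request *req)
 *	{
 *		return crypto_enqueue_request(&example_queue, req);
 *	}
 *
 *	static void example_pump(void)
 *	{
 *		struct crypto_async_request *req, *backlog;
 *
 *		backlog = crypto_get_backlog(&example_queue);
 *		req = crypto_dequeue_request(&example_queue);
 *		if (!req)
 *			return;
 *
 *		if (backlog)
 *			backlog->complete(backlog, -EINPROGRESS);
 *
 *		(hand req to the hardware here)
 *	}
 */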

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static void __init crypto_start_tests(void)
{
	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			l->test_started = true;
			larval = l;
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;

		crypto_wait_for_test(larval);
	}

	static_branch_enable(&crypto_boot_test_finished);
}

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	crypto_start_tests();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
late_initcall(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");