// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline int crypto_set_driver_name(struct crypto_alg *alg)
{
	static const char suffix[] = "-generic";
	char *driver_name = alg->cra_driver_name;
	int len;

	if (*driver_name)
		return 0;

	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(driver_name + len, suffix, sizeof(suffix));
	return 0;
}

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return crypto_set_driver_name(alg);
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	if (!inst->alg.cra_type->free) {
		inst->tmpl->free(inst);
		return;
	}

	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}
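/*
 * The three helpers below implement an iterative depth-first walk over
 * the graph of algorithm users: every registered algorithm keeps the
 * spawns that reference it on ->cra_users, and each spawn points back
 * at the instance that owns it.  Removing an algorithm therefore means
 * visiting every instance built, directly or indirectly, on top of it.
 * As a hypothetical example, dropping the "sha256-generic" driver also
 * has to tear down an "hmac(sha256)" instance created from it, and
 * anything created from that instance in turn.
 *
 * crypto_more_spawns() pops the next spawn off the DFS stack and hands
 * back the ->cra_users list the walk should continue from, or NULL
 * once the stack is exhausted.
 */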
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_next_entry(spawn, list);

	if (spawn->alg && &n->list != stack && !n->alg)
		n->alg = (n->list.next == stack) ? alg :
			 &list_next_entry(n, list)->inst->alg;

	list_move(&spawn->list, secondary_spawns);

	return &n->list == stack ? top : &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
	if (hlist_unhashed(&inst->list))
		return;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			BUG_ON(&inst->alg == alg);

			list_move(&spawn->list, &stack);

			if (&inst->alg == nalg)
				break;

			spawn->alg = NULL;
			spawns = &inst->alg.cra_users;

			/*
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (spawn->alg)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
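/*
 * Registration is a two-step handshake with the crypto manager.
 * __crypto_register_alg() adds the algorithm with CRYPTO_ALG_TESTED
 * cleared and queues a larval that stands in for it; lookups block on
 * the larval's completion.  Once the self-tests have run,
 * crypto_alg_tested() marks the adult as tested and completes the
 * larval.  Roughly:
 *
 *	crypto_register_alg()
 *	    __crypto_register_alg()	adds alg + larval placeholder
 *	    crypto_wait_for_test()	notifies cryptomgr, then waits
 *		... self-tests run ...
 *	    crypto_alg_tested()		sets CRYPTO_ALG_TESTED and
 *					completes the larval
 */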
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}
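/*
 * crypto_alg_tested() consumes a test result.  On success it also
 * binds any larvals that were waiting for an algorithm of this name to
 * the newly tested adult, and uses crypto_remove_spawns() to migrate
 * users away from same-name implementations that no longer win the
 * priority comparison.
 */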
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;
			if (!crypto_mod_get(alg))
				continue;

			larval->adult = alg;
			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_probing_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

int crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (ret)
		return ret;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

int crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_unregister_alg(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].cra_driver_name, algs[i].cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);
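/*
 * Typical driver-side usage of the registration API (a minimal sketch;
 * the algorithm shown is a placeholder, not a complete or functional
 * cipher definition):
 *
 *	static struct crypto_alg my_algs[] = { {
 *		.cra_name		= "ecb(example)",
 *		.cra_driver_name	= "ecb-example-driver",
 *		.cra_priority		= 300,
 *		.cra_blocksize		= 16,
 *		.cra_module		= THIS_MODULE,
 *	} };
 *
 *	static int __init my_driver_init(void)
 *	{
 *		return crypto_register_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_driver_exit(void)
 *	{
 *		crypto_unregister_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 */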
int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
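/*
 * The "crypto-%s" request_module() above relies on template modules
 * advertising themselves with MODULE_ALIAS_CRYPTO().  A template
 * implementation (hypothetical names) would typically pair
 * crypto_register_template()/crypto_unregister_template() in its module
 * init/exit paths with a definition along these lines:
 *
 *	static struct crypto_template my_tmpl = {
 *		.name	= "mywrap",
 *		.create	= my_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	MODULE_ALIAS_CRYPTO("mywrap");
 */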
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

int crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	spawn->inst = inst;
	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend)
{
	int err = -EINVAL;

	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
		goto out;

	spawn->frontend = frontend;
	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);

int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err;

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	err = crypto_init_spawn(spawn, alg, spawn->inst, mask);
	crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg)
		return;

	down_write(&crypto_alg_sem);
	list_del(&spawn->list);
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_alg *alg2;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	alg2 = alg;
	if (alg2)
		alg2 = crypto_mod_get(alg2);
	up_read(&crypto_alg_sem);

	if (!alg2) {
		if (alg)
			crypto_shoot_alg(alg);
		return ERR_PTR(-EAGAIN);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
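/*
 * Template parameters arrive as a netlink-style attribute vector:
 * tb[0] carries a struct crypto_attr_type with the requested type and
 * mask, while later entries name underlying algorithms (CRYPTOA_ALG)
 * or carry numeric parameters (CRYPTOA_U32).  A template's ->create
 * hook would typically begin along these lines (hypothetical sketch):
 *
 *	static int my_create(struct crypto_template *tmpl,
 *			     struct rtattr **tb)
 *	{
 *		const char *cipher_name;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER);
 *		if (err)
 *			return err;
 *
 *		cipher_name = crypto_attr_alg_name(tb[1]);
 *		if (IS_ERR(cipher_name))
 *			return PTR_ERR(cipher_name);
 *		...
 *	}
 */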
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask)
{
	const char *name;

	name = crypto_attr_alg_name(rta);
	if (IS_ERR(name))
		return ERR_CAST(name);

	return crypto_find_alg(name, frontend, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg2);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);

void *crypto_alloc_instance(const char *name, struct crypto_alg *alg,
			    unsigned int head)
{
	struct crypto_instance *inst;
	char *p;
	int err;

	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
		    GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = crypto_inst_setname(inst, name, alg);
	if (err)
		goto err_free_inst;

	return p;

err_free_inst:
	kfree(p);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);
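/*
 * Return-value convention for the request queue below:
 * crypto_enqueue_request() returns -EINPROGRESS when the request was
 * queued below max_qlen, -EBUSY when it was accepted only as backlog
 * (the submitter should throttle until the request's completion
 * callback fires with -EINPROGRESS), and -ENOSPC when the queue is
 * full and the request did not set CRYPTO_TFM_REQ_MAY_BACKLOG.
 */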
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
{
	struct crypto_async_request *req;

	list_for_each_entry(req, &queue->list, list) {
		if (req->tfm == tfm)
			return 1;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
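/*
 * crypto_inc() above implements the big-endian counter increment used
 * by counter-based modes such as CTR: the last byte is incremented
 * first and carries propagate toward the front of the block.  For a
 * four-byte counter:
 *
 *	00 00 00 ff  ->  00 00 01 00
 *	ff ff ff ff  ->  00 00 00 00	(wraps around to zero)
 *
 * The 32-bit loop handles as many trailing words as alignment allows
 * and falls back to crypto_inc_byte() for the remainder.
 */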
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);
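/*
 * Statistics helpers.  Each crypto_stats_* call below is designed to
 * pair with an earlier crypto_stats_get(): the caller takes a
 * reference before starting the operation, and the helper drops it
 * after updating the counters.  -EINPROGRESS and -EBUSY are not
 * counted as errors, since they merely indicate an asynchronous or
 * backlogged request.
 */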
#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_ablkcipher_encrypt(unsigned int nbytes, int ret,
				     struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(nbytes, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_encrypt);

void crypto_stats_ablkcipher_decrypt(unsigned int nbytes, int ret,
				     struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(nbytes, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_decrypt);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
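/*
 * Note that the skcipher helpers below account into the same
 * stats.cipher bucket as the ablkcipher helpers above, so the two
 * symmetric-cipher interfaces share one set of counters per algorithm.
 */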
void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");