/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline int crypto_set_driver_name(struct crypto_alg *alg)
{
	static const char suffix[] = "-generic";
	char *driver_name = alg->cra_driver_name;
	int len;

	if (*driver_name)
		return 0;

	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(driver_name + len, suffix, sizeof(suffix));
	return 0;
}

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return crypto_set_driver_name(alg);
}
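
/*
 * Illustrative sketch (added commentary; the algorithm and its values are
 * hypothetical, not part of this file): a struct crypto_alg that passes
 * crypto_check_alg() above would look roughly like
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name	= "my-cipher",
 *		.cra_flags	= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize	= 16,	(<= MAX_CIPHER_BLOCKSIZE)
 *		.cra_alignmask	= 3,	(of the form 2^n - 1)
 *		.cra_priority	= 100,	(must not be negative)
 *		.cra_module	= THIS_MODULE,
 *	};
 *
 * The alignmask test reads: (m & (m + 1)) == 0 holds exactly when m is a
 * contiguous mask of low bits, i.e. 2^n - 1.  An empty cra_driver_name is
 * filled in as "my-cipher-generic" by crypto_set_driver_name().
 */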

static void crypto_free_instance(struct crypto_instance *inst)
{
	if (!inst->alg.cra_type->free) {
		inst->tmpl->free(inst);
		return;
	}

	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_next_entry(spawn, list);

	if (spawn->alg && &n->list != stack && !n->alg)
		n->alg = (n->list.next == stack) ? alg :
			 &list_next_entry(n, list)->inst->alg;

	list_move(&spawn->list, secondary_spawns);

	return &n->list == stack ? top : &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
	if (hlist_unhashed(&inst->list))
		return;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			BUG_ON(&inst->alg == alg);

			list_move(&spawn->list, &stack);

			if (&inst->alg == nalg)
				break;

			spawn->alg = NULL;
			spawns = &inst->alg.cra_users;

			/*
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (spawn->alg)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
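
/*
 * Informal sketch of crypto_remove_spawns() (added commentary, not from
 * the original author): the function performs an iterative depth-first
 * walk over the "users" graph rooted at @alg.  Direct users whose type
 * still matches are collected on @top; each visited instance's own users
 * form the next level, with @stack remembering the path and
 * crypto_more_spawns() popping it to resume the parent level.  Spawns
 * that may survive (those on a path to @nalg) end up on
 * @secondary_spawns with ->alg intact and are re-linked at the end; all
 * others are detached and their instances are queued on @list for final
 * removal by the caller.
 */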

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;
			if (!crypto_mod_get(alg))
				continue;

			larval->adult = alg;
			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
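
/*
 * Added commentary (not from the original author): the registration
 * lifecycle implemented by the two functions above is roughly
 *
 *	crypto_register_alg(alg)
 *	  -> __crypto_register_alg() links alg plus a larval placeholder
 *	     (requesting CRYPTO_ALG_TESTED) into crypto_alg_list
 *	  -> the test manager is notified and runs the self-tests
 *	  -> crypto_alg_tested(name, err) marks the larval dead, sets
 *	     CRYPTO_ALG_TESTED on the adult on success, and wakes all
 *	     waiters via complete_all()
 *
 * Lookups that race with registration sleep on the larval's completion
 * instead of failing outright.
 */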

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_probing_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

int crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (ret)
		return ret;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
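
/*
 * Illustrative sketch (hypothetical driver, not part of this file): the
 * usual pairing of the two exported calls above in a driver module is
 *
 *	static int __init my_mod_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 *	static void __exit my_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&my_alg);
 *	}
 *
 * crypto_register_alg() does not return until the self-test has finished
 * (crypto_wait_for_test() blocks on the larval's completion), so a zero
 * return means the algorithm is ready for use.
 */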

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

int crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_unregister_alg(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].cra_driver_name, algs[i].cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);
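
/*
 * Illustrative sketch (hypothetical template, not part of this file): a
 * template is registered once and instantiated on demand, e.g. when a
 * user asks for "mywrap(aes)":
 *
 *	static struct crypto_template my_tmpl = {
 *		.name	= "mywrap",
 *		.create	= my_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	err = crypto_register_template(&my_tmpl);
 *
 * Unregistering a template tears down all of its live instances, as
 * crypto_unregister_template() above shows.
 */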

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

int crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
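
/*
 * Illustrative sketch (hypothetical, not part of this file): a template's
 * ->create callback typically ends by handing the fully constructed
 * instance to crypto_register_instance():
 *
 *	static int my_create(struct crypto_template *tmpl,
 *			     struct rtattr **tb)
 *	{
 *		struct crypto_instance *inst;
 *
 *		...allocate inst, set its names, initialise its spawns...
 *
 *		return crypto_register_instance(tmpl, inst);
 *	}
 *
 * crypto_register_instance() stamps cra_module and CRYPTO_ALG_INSTANCE
 * itself, so the callback need not.
 */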

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	spawn->inst = inst;
	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend)
{
	int err = -EINVAL;

	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
		goto out;

	spawn->frontend = frontend;
	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);

int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err;

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	err = crypto_init_spawn(spawn, alg, spawn->inst, mask);
	crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg)
		return;

	down_write(&crypto_alg_sem);
	list_del(&spawn->list);
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_alg *alg2;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	alg2 = alg;
	if (alg2)
		alg2 = crypto_mod_get(alg2);
	up_read(&crypto_alg_sem);

	if (!alg2) {
		if (alg)
			crypto_shoot_alg(alg);
		return ERR_PTR(-EAGAIN);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
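
/*
 * Added commentary (not from the original author): a spawn is an
 * instance's reference to an underlying algorithm.  The lifecycle, pieced
 * together from the helpers above, is
 *
 *	crypto_init_spawn()/crypto_grab_spawn()	at instance creation
 *	crypto_spawn_tfm()/crypto_spawn_tfm2()	per transform allocation
 *	crypto_drop_spawn()			at instance teardown
 *
 * crypto_spawn_alg() takes the module reference under crypto_alg_sem; if
 * that reference can no longer be taken, it marks the algorithm via
 * crypto_shoot_alg() and returns -EAGAIN.
 */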

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask)
{
	const char *name;

	name = crypto_attr_alg_name(rta);
	if (IS_ERR(name))
		return ERR_CAST(name);

	return crypto_find_alg(name, frontend, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg2);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);

void *crypto_alloc_instance(const char *name, struct crypto_alg *alg,
			    unsigned int head)
{
	struct crypto_instance *inst;
	char *p;
	int err;

	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
		    GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = crypto_inst_setname(inst, name, alg);
	if (err)
		goto err_free_inst;

	return p;

err_free_inst:
	kfree(p);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);
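
/*
 * Illustrative sketch (hypothetical, not part of this file): inside a
 * template's ->create callback the rtattr helpers above are typically
 * used as
 *
 *	algt = crypto_get_attr_type(tb);	(tb[0]: requested type/mask)
 *	name = crypto_attr_alg_name(tb[1]);	(tb[1]: inner algorithm name)
 *
 * so instantiating "mywrap(aes)" hands ->create the string "aes".
 * crypto_inst_setname() then yields "mywrap(aes)" for cra_name and, for
 * example, "mywrap(aes-generic)" for cra_driver_name.
 */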

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
{
	struct crypto_async_request *req;

	list_for_each_entry(req, &queue->list, list) {
		if (req->tfm == tfm)
			return 1;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);
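
/*
 * Added commentary (not from the original author): the queue return
 * codes above encode three distinct outcomes for the submitter:
 *
 *	-EINPROGRESS	accepted; completion is signalled asynchronously
 *	-EBUSY		accepted only because CRYPTO_TFM_REQ_MAY_BACKLOG
 *			was set; the submitter should throttle until the
 *			request completes
 *	-ENOSPC		rejected; the queue is full and backlogging was
 *			not requested
 *
 * queue->backlog points at the first backlogged request (or at the list
 * head when there is none), which is why crypto_dequeue_request()
 * advances it in lock step with removals.
 */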

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
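
/*
 * Worked example for crypto_inc() (added commentary, not from the
 * original author): the buffer is treated as a single big-endian
 * integer, as used for CTR-mode counter blocks.  With size == 8:
 *
 *	before:	00 00 00 00 00 00 00 ff
 *	after:	00 00 00 00 00 00 01 00
 *
 * The fast path consumes whole 32-bit words from the end of the buffer
 * and stops as soon as a word does not wrap to zero; crypto_inc_byte()
 * handles the remaining leading bytes one at a time.
 */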

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_ablkcipher_encrypt(unsigned int nbytes, int ret,
				     struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(nbytes, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_encrypt);

void crypto_stats_ablkcipher_decrypt(unsigned int nbytes, int ret,
				     struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(nbytes, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_decrypt);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");