/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline int crypto_set_driver_name(struct crypto_alg *alg)
{
	static const char suffix[] = "-generic";
	char *driver_name = alg->cra_driver_name;
	int len;

	if (*driver_name)
		return 0;

	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(driver_name + len, suffix, sizeof(suffix));
	return 0;
}

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return crypto_set_driver_name(alg);
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	if (!inst->alg.cra_type->free) {
		inst->tmpl->free(inst);
		return;
	}

	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_next_entry(spawn, list);

	if (spawn->alg && &n->list != stack && !n->alg)
		n->alg = (n->list.next == stack) ? alg :
			 &list_next_entry(n, list)->inst->alg;

	list_move(&spawn->list, secondary_spawns);

	return &n->list == stack ? top : &n->inst->alg.cra_users;
}
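/*
 * Descriptive note: crypto_more_spawns() above and crypto_remove_spawns()
 * below together implement an iterative depth-first walk over the graph of
 * instances that transitively use an algorithm.  crypto_remove_spawns()
 * pushes matching spawns onto a stack; crypto_more_spawns() pops the next
 * spawn and returns the ->cra_users list to descend into, parking already
 * visited spawns on secondary_spawns so survivors can be re-linked and the
 * rest torn down in one final pass.
 */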
static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
	if (hlist_unhashed(&inst->list))
		return;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			BUG_ON(&inst->alg == alg);

			list_move(&spawn->list, &stack);

			if (&inst->alg == nalg)
				break;

			spawn->alg = NULL;
			spawns = &inst->alg.cra_users;

			/*
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (spawn->alg)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;
			if (!crypto_mod_get(alg))
				continue;

			larval->adult = alg;
			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
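/*
 * Descriptive note: registration is a two-step handshake.  A "larval"
 * placeholder carrying the new algorithm's name is published first, and
 * lookups for that name block on the larval's completion.  Once the
 * self-tests report back, crypto_alg_tested() above resolves the larval to
 * the adult algorithm (or leaves it to be killed on failure) and wakes all
 * waiters.
 */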
void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_probing_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

int crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (ret)
		return ret;

	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

int crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_unregister_alg(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].cra_driver_name, algs[i].cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);
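/*
 * Illustrative sketch: a typical driver fills in a struct crypto_alg and
 * registers it from module init.  The names and context type below are
 * hypothetical, not code from this file:
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name	 = "aes",
 *		.cra_driver_name = "aes-mydriver",
 *		.cra_priority	 = 300,
 *		.cra_flags	 = CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize	 = 16,
 *		.cra_ctxsize	 = sizeof(struct my_aes_ctx),
 *		.cra_module	 = THIS_MODULE,
 *	};
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_alg(&my_alg);
 *	}
 *
 * Note that crypto_register_alg() blocks until the self-tests for the new
 * algorithm have completed (see crypto_wait_for_test() above).
 */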
int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

int crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
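/*
 * Illustrative sketch: a template such as "hmac" registers itself once and
 * then stamps out instances such as "hmac(sha256)" on demand.  The outline
 * below is hypothetical, not code from this file:
 *
 *	static struct crypto_template my_tmpl = {
 *		.name	= "mywrap",
 *		.create	= my_create,	// builds an instance from rtattr args
 *		.module	= THIS_MODULE,
 *	};
 *
 *	crypto_register_template(&my_tmpl);
 *
 * A later request for "mywrap(aes)" is routed to my_create(), which
 * allocates a crypto_instance around the inner algorithm and hands it to
 * crypto_register_instance() above.
 */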
int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err = -EAGAIN;

	spawn->inst = inst;
	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend)
{
	int err = -EINVAL;

	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
		goto out;

	spawn->frontend = frontend;
	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);

int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err;

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	err = crypto_init_spawn(spawn, alg, spawn->inst, mask);
	crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg)
		return;

	down_write(&crypto_alg_sem);
	list_del(&spawn->list);
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_alg *alg2;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	alg2 = alg;
	if (alg2)
		alg2 = crypto_mod_get(alg2);
	up_read(&crypto_alg_sem);

	if (!alg2) {
		if (alg)
			crypto_shoot_alg(alg);
		return ERR_PTR(-EAGAIN);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);
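/*
 * Illustrative sketch: a template's ->create() typically starts by checking
 * the requested type and fetching the inner algorithm's name from the
 * rtattr table it was handed.  tb[0] carries the CRYPTOA_TYPE attribute
 * consumed above; tb[1] and later entries carry the template's arguments.
 * The function name below is hypothetical:
 *
 *	static int my_create(struct crypto_template *tmpl, struct rtattr **tb)
 *	{
 *		const char *cipher_name;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER);
 *		if (err)
 *			return err;
 *
 *		cipher_name = crypto_attr_alg_name(tb[1]);
 *		if (IS_ERR(cipher_name))
 *			return PTR_ERR(cipher_name);
 *		...
 *	}
 */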
const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask)
{
	const char *name;

	name = crypto_attr_alg_name(rta);
	if (IS_ERR(name))
		return ERR_CAST(name);

	return crypto_find_alg(name, frontend, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg2);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);

void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head)
{
	struct crypto_instance *inst;
	char *p;
	int err;

	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
		    GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = crypto_inst_setname(inst, name, alg);
	if (err)
		goto err_free_inst;

	return p;

err_free_inst:
	kfree(p);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance2);

struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg)
{
	struct crypto_instance *inst;
	struct crypto_spawn *spawn;
	int err;

	inst = crypto_alloc_instance2(name, alg, 0);
	if (IS_ERR(inst))
		goto out;

	spawn = crypto_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);

	if (err)
		goto err_free_inst;

	return inst;

err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);

out:
	return inst;
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);
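/*
 * Example of the naming scheme enforced by crypto_inst_setname(): wrapping
 * the generic AES cipher with a template named "cbc" yields
 *
 *	cra_name	= "cbc(aes)"
 *	cra_driver_name	= "cbc(aes-generic)"
 *
 * Both strings, including the parentheses and the NUL terminator, must fit
 * in CRYPTO_MAX_ALG_NAME or the instance is rejected with -ENAMETOOLONG.
 */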
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
{
	struct crypto_async_request *req;

	list_for_each_entry(req, &queue->list, list) {
		if (req->tfm == tfm)
			return 1;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);
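/*
 * Illustrative sketch of the usual producer/consumer pattern around
 * crypto_queue (the lock and queue names are hypothetical).  Enqueueing
 * returns -EINPROGRESS (accepted), -EBUSY (accepted as backlog) or -ENOSPC
 * (rejected); the dequeueing side must notify any backlogged request so its
 * submitter knows it may queue more work:
 *
 *	spin_lock_bh(&my_lock);
 *	err = crypto_enqueue_request(&my_queue, req);
 *	spin_unlock_bh(&my_lock);
 *	// err is returned to the caller as the async status
 *
 *	// in the driver's worker:
 *	spin_lock_bh(&my_lock);
 *	backlog = crypto_get_backlog(&my_queue);
 *	req = crypto_dequeue_request(&my_queue);
 *	spin_unlock_bh(&my_lock);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	if (req)
 *		... process req, then call req->complete(req, err) ...
 */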
static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
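/*
 * Worked example: crypto_inc() treats the buffer as one large big-endian
 * counter, as used for CTR-mode IVs.  With a 16-byte counter
 *
 *	u8 iv[16] = { 0, ..., 0, 0xff, 0xff };
 *	crypto_inc(iv, 16);	// iv now ends in 0x01 0x00 0x00
 *
 * the carry propagates across the 32-bit word boundaries handled by the
 * fast path and, if needed, into crypto_inc_byte().  __crypto_xor() is
 * normally reached through the crypto_xor() helpers in <crypto/algapi.h>,
 * which inline the common small, aligned cases.
 */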
unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_ablkcipher_encrypt(unsigned int nbytes, int ret,
				     struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(nbytes, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_encrypt);

void crypto_stats_ablkcipher_decrypt(unsigned int nbytes, int ret,
				     struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(nbytes, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_decrypt);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");