/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline int crypto_set_driver_name(struct crypto_alg *alg)
{
	static const char suffix[] = "-generic";
	char *driver_name = alg->cra_driver_name;
	int len;

	if (*driver_name)
		return 0;

	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(driver_name + len, suffix, sizeof(suffix));
	return 0;
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	if (alg->cra_blocksize > PAGE_SIZE / 8)
		return -EINVAL;

	if (alg->cra_priority < 0)
		return -EINVAL;

	return crypto_set_driver_name(alg);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	tmpl->free(inst);
	crypto_tmpl_put(tmpl);
}

static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	if (list_empty(stack))
		return NULL;

	spawn = list_first_entry(stack, struct crypto_spawn, list);
	n = list_entry(spawn->list.next, struct crypto_spawn, list);

	if (spawn->alg && &n->list != stack && !n->alg)
		n->alg = (n->list.next == stack) ? alg :
			 &list_entry(n->list.next, struct crypto_spawn,
				     list)->inst->alg;

	list_move(&spawn->list, secondary_spawns);

	return &n->list == stack ? top : &n->inst->alg.cra_users;
}
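
/*
 * The helpers below tear down template instances when an algorithm
 * they were built on goes away.  crypto_remove_spawns() walks the
 * cra_users lists, marks each affected instance CRYPTO_ALG_DEAD and
 * collects it on the caller-supplied list; the final release is done
 * by crypto_remove_final() once crypto_alg_sem has been dropped.
 */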

static void crypto_remove_spawn(struct crypto_spawn *spawn,
				struct list_head *list)
{
	struct crypto_instance *inst = spawn->inst;
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
	if (hlist_unhashed(&inst->list))
		return;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, &inst->alg);
	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			BUG_ON(&inst->alg == alg);

			list_move(&spawn->list, &stack);

			if (&inst->alg == nalg)
				break;

			spawn->alg = NULL;
			spawns = &inst->alg.cra_users;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (spawn->alg)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else
			crypto_remove_spawn(spawn, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
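
/*
 * __crypto_register_alg() links a new algorithm into crypto_alg_list
 * under crypto_alg_sem.  Names that collide with an already registered
 * algorithm or larval are rejected with -EEXIST; otherwise a larval is
 * returned that stands in for the algorithm until its self-tests have
 * completed (see crypto_alg_tested() below).
 */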

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	atomic_set(&alg->cra_refcnt, 1);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	atomic_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}
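
/*
 * crypto_alg_tested() is called once the self-test for @name has
 * finished, normally via the cryptomgr notifier chain, or directly
 * from crypto_wait_for_test() below when no tester handled the
 * notification.  On success the algorithm gains CRYPTO_ALG_TESTED,
 * pending larvals with a matching name are pointed at it, and
 * instances built on same-named implementations of lower priority
 * are torn down.
 */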

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	printk(KERN_ERR "alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;
			if (!crypto_mod_get(alg))
				continue;

			larval->adult = alg;
			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_interruptible(&larval->completion);
	WARN_ON(err);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

int crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (ret)
		return ret;

	BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

int crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_unregister_alg(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].cra_driver_name, algs[i].cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);
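
/*
 * Illustrative sketch of how an algorithm driver typically uses the
 * registration API above.  The names (example_alg, example_setkey,
 * example_encrypt, example_decrypt, EXAMPLE_*) are hypothetical and
 * not part of this file:
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize		= EXAMPLE_BLOCK_SIZE,
 *		.cra_ctxsize		= sizeof(struct example_ctx),
 *		.cra_module		= THIS_MODULE,
 *		.cra_u			= { .cipher = {
 *			.cia_min_keysize	= EXAMPLE_KEY_SIZE,
 *			.cia_max_keysize	= EXAMPLE_KEY_SIZE,
 *			.cia_setkey		= example_setkey,
 *			.cia_encrypt		= example_encrypt,
 *			.cia_decrypt		= example_decrypt } }
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */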

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	crypto_notify(CRYPTO_MSG_TMPL_REGISTER, tmpl);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *p, *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, p, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);
		BUG_ON(err);
	}

	crypto_notify(CRYPTO_MSG_TMPL_UNREGISTER, tmpl);

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, p, n, list, list) {
		BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1);
		tmpl->free(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name), name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		goto err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

int crypto_unregister_instance(struct crypto_alg *alg)
{
	int err;
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;
	LIST_HEAD(users);

	if (!(alg->cra_flags & CRYPTO_ALG_INSTANCE))
		return -EINVAL;

	BUG_ON(atomic_read(&alg->cra_refcnt) != 1);

	down_write(&crypto_alg_sem);

	hlist_del_init(&inst->list);
	err = crypto_remove_alg(alg, &users);

	up_write(&crypto_alg_sem);

	if (err)
		return err;

	tmpl->free(inst);
	crypto_remove_final(&users);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
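
/*
 * A crypto_spawn is the reference a template instance holds on the
 * algorithm it was built from.  crypto_init_spawn() adds the spawn to
 * that algorithm's cra_users list so crypto_remove_spawns() can find
 * and tear down the instance if the algorithm is unregistered or
 * superseded; crypto_drop_spawn() removes the reference again.
 */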

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err = -EAGAIN;

	spawn->inst = inst;
	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);

int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend)
{
	int err = -EINVAL;

	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
		goto out;

	spawn->frontend = frontend;
	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn2);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg)
		return;

	down_write(&crypto_alg_sem);
	list_del(&spawn->list);
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_alg *alg2;

	down_read(&crypto_alg_sem);
	alg = spawn->alg;
	alg2 = alg;
	if (alg2)
		alg2 = crypto_mod_get(alg2);
	up_read(&crypto_alg_sem);

	if (!alg2) {
		if (alg)
			crypto_shoot_alg(alg);
		return ERR_PTR(-EAGAIN);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
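
/*
 * The helpers below parse the rtattr-encoded parameters handed to a
 * template's alloc routine when an instance such as "cbc(aes)" is
 * created: tb[0] carries a struct crypto_attr_type (CRYPTOA_TYPE)
 * with the requested type and mask, while later entries name the
 * underlying algorithm(s) (CRYPTOA_ALG) or supply numeric parameters
 * (CRYPTOA_U32).
 */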

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask)
{
	const char *name;
	int err;

	name = crypto_attr_alg_name(rta);
	err = PTR_ERR(name);
	if (IS_ERR(name))
		return ERR_PTR(err);

	return crypto_find_alg(name, frontend, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg2);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head)
{
	struct crypto_instance *inst;
	char *p;
	int err;

	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
		    GFP_KERNEL);
	if (!p)
		return ERR_PTR(-ENOMEM);

	inst = (void *)(p + head);

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	return p;

err_free_inst:
	kfree(p);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance2);

struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg)
{
	struct crypto_instance *inst;
	struct crypto_spawn *spawn;
	int err;

	inst = crypto_alloc_instance2(name, alg, 0);
	if (IS_ERR(inst))
		goto out;

	spawn = crypto_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);

	if (err)
		goto err_free_inst;

	return inst;

err_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);

out:
	return inst;
}
EXPORT_SYMBOL_GPL(crypto_alloc_instance);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		err = -EBUSY;
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
			goto out;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);
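
/*
 * Illustrative sketch of how a driver typically uses crypto_queue;
 * the names dev->lock, dev->queue and example_do_one() are
 * hypothetical and not part of this file:
 *
 *	crypto_init_queue(&dev->queue, 50);
 *
 *	spin_lock_bh(&dev->lock);
 *	err = crypto_enqueue_request(&dev->queue, req);
 *	spin_unlock_bh(&dev->lock);
 *
 * err is -EINPROGRESS when the request was queued, or -EBUSY when the
 * queue is already full; with CRYPTO_TFM_REQ_MAY_BACKLOG set the
 * request is still accepted as backlog in the -EBUSY case.  A worker
 * later pulls requests off the queue:
 *
 *	spin_lock_bh(&dev->lock);
 *	req = crypto_dequeue_request(&dev->queue);
 *	spin_unlock_bh(&dev->lock);
 *	if (req)
 *		example_do_one(dev, req);
 */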

void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return (char *)list_entry(request, struct crypto_async_request, list) -
	       offset;
}
EXPORT_SYMBOL_GPL(__crypto_dequeue_request);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	return __crypto_dequeue_request(queue, 0);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);

int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
{
	struct crypto_async_request *req;

	list_for_each_entry(req, &queue->list, list) {
		if (req->tfm == tfm)
			return 1;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	for (; size >= 4; size -= 4) {
		c = be32_to_cpu(*--b) + 1;
		*b = cpu_to_be32(c);
		if (c)
			return;
	}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);

static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
{
	for (; size; size--)
		*a++ ^= *b++;
}

void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
{
	u32 *a = (u32 *)dst;
	u32 *b = (u32 *)src;

	for (; size >= 4; size -= 4)
		*a++ ^= *b++;

	crypto_xor_byte((u8 *)a, (u8 *)b, size);
}
EXPORT_SYMBOL_GPL(crypto_xor);

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");