// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

#define MAX_SHASH_ALIGNMASK 63

static const struct crypto_type crypto_shash_type;

static inline struct crypto_istat_hash *shash_get_stat(struct shash_alg *alg)
{
	return hash_get_stat(&alg->halg);
}

static inline int crypto_shash_errstat(struct shash_alg *alg, int err)
{
	return crypto_hash_errstat(&alg->halg, err);
}

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_SHASH_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}
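/*
 * Example of the head-alignment handling above: with alignmask == 7 and
 * data at an address whose low three bits are 3, unaligned_len is
 * 8 - 3 = 5.  The first five bytes are copied into the stack bounce
 * buffer (made 8-byte aligned by PTR_ALIGN()) and hashed from there,
 * after which data + 5 is alignmask-aligned and the remaining len - 5
 * bytes can be passed to ->update() directly.  The memset() wipes the
 * copied bytes from the stack afterwards.
 */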
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_add(len, &shash_get_stat(shash)->hash_tlen);

	if ((unsigned long)data & alignmask)
		err = shash_update_unaligned(desc, data, len);
	else
		err = shash->update(desc, data, len);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_SHASH_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_inc(&shash_get_stat(shash)->hash_cnt);

	if ((unsigned long)out & alignmask)
		err = shash_final_unaligned(desc, out);
	else
		err = shash->final(desc, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return shash_update_unaligned(desc, data, len) ?:
	       shash_final_unaligned(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		err = shash_finup_unaligned(desc, data, len, out);
	else
		err = shash->finup(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       shash_update_unaligned(desc, data, len) ?:
	       shash_final_unaligned(desc, out);
}
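/*
 * One-shot hashing sketch (illustrative only; "data", "len" and "digest"
 * are placeholders, not identifiers from this file): a typical caller
 * allocates a transform and then uses crypto_shash_tfm_digest() below,
 * which wraps the on-stack descriptor setup around crypto_shash_digest():
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_tfm_digest(tfm, data, len, digest);
 *	crypto_free_shash(tfm);
 */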
int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		err = -ENOKEY;
	else if (((unsigned long)data | (unsigned long)out) & alignmask)
		err = shash_digest_unaligned(desc, data, len, out);
	else
		err = shash->digest(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}
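/*
 * shash_ahash_finup() below maps the request's scatterlist one chunk at a
 * time using the crypto_hash_walk helpers: every chunk except the last is
 * fed to crypto_shash_update(), and the final chunk goes through
 * crypto_shash_finup() so the digest lands in req->result without a
 * separate final() call.  A zero or negative return from
 * crypto_hash_walk_done() terminates the loop.
 */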
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_local_page(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_local(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
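/*
 * Layout note for the ahash wrapper set up above: the ahash tfm context
 * holds a single pointer to the underlying shash transform, while each
 * ahash_request carries a full shash_desc in its request context, which
 * is why reqsize is sizeof(struct shash_desc) plus the algorithm's
 * descsize.  crypto_clone_shash_ops_async() below therefore only has to
 * clone that child shash, dropping the ahash copy on failure.
 */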
struct crypto_ahash *crypto_clone_shash_ops_async(struct crypto_ahash *nhash,
						  struct crypto_ahash *hash)
{
	struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
	struct crypto_shash **ctx = crypto_ahash_ctx(hash);
	struct crypto_shash *shash;

	shash = crypto_clone_shash(*ctx);
	if (IS_ERR(shash)) {
		crypto_free_ahash(nhash);
		return ERR_CAST(shash);
	}

	*nctx = shash;

	return nhash;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

static int __maybe_unused crypto_shash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static int __maybe_unused crypto_shash_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	return crypto_hash_report_stat(skb, alg, "shash");
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_shash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_shash_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);
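/*
 * Cloning semantics for crypto_clone_shash() below: an unkeyed algorithm
 * can safely share a single transform, so cloning simply takes another
 * reference on the existing tfm.  A keyed algorithm needs its own context
 * copy, which only works if the algorithm either has a trivial context
 * (no ->init_tfm()/->cra_init) or supplies ->clone_tfm() to duplicate it;
 * otherwise -ENOSYS is returned.
 */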
struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
{
	struct crypto_tfm *tfm = crypto_shash_tfm(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *nhash;
	int err;

	if (!crypto_shash_alg_has_setkey(alg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
		return ERR_PTR(-ENOSYS);

	nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
	if (IS_ERR(nhash))
		return nhash;

	nhash->descsize = hash->descsize;

	if (alg->clone_tfm) {
		err = alg->clone_tfm(nhash, hash);
		if (err) {
			crypto_free_shash(nhash);
			return ERR_PTR(err);
		}
	}

	return nhash;
}
EXPORT_SYMBOL_GPL(crypto_clone_shash);

int hash_prepare_alg(struct hash_alg_common *alg)
{
	struct crypto_istat_hash *istat = hash_get_stat(alg);
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
		return -EINVAL;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		memset(istat, 0, sizeof(*istat));

	return 0;
}

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->descsize > HASH_MAX_DESCSIZE)
		return -EINVAL;

	if (base->cra_alignmask > MAX_SHASH_ALIGNMASK)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_shash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->halg.statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);
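/*
 * Registration sketch (illustrative; the my_* names and MY_* constants
 * are placeholders, not part of this file): a driver typically fills in a
 * struct shash_alg and registers it from its module init function:
 *
 *	static struct shash_alg my_alg = {
 *		.digestsize	= MY_DIGEST_SIZE,
 *		.init		= my_init,
 *		.update		= my_update,
 *		.final		= my_final,
 *		.descsize	= sizeof(struct my_desc_ctx),
 *		.base		= {
 *			.cra_name	= "my-hash",
 *			.cra_blocksize	= MY_BLOCK_SIZE,
 *			.cra_module	= THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_shash(&my_alg);
 *
 * shash_prepare_alg() above fills in default ->finup(), ->digest(),
 * ->export()/->import() and ->setkey() callbacks when the driver leaves
 * them NULL.
 */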
void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");