/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __attribute__ ((aligned)) u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
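/*
 * Example use of the exported API (a sketch; "sha256" is only an
 * assumed example algorithm and error handling is abbreviated):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	struct shash_desc *desc;
 *
 *	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *		       GFP_KERNEL);
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *
 *	crypto_shash_digest(desc, data, len, out);
 *
 *	kzfree(desc);
 *	crypto_free_shash(tfm);
 *
 * The *_unaligned() helpers below mirror shash_setkey_unaligned():
 * data that violates the algorithm's alignment mask is bounced through
 * an aligned stack buffer, which is wiped afterwards so no message or
 * digest bytes linger there.
 */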
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
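/*
 * The shash_ahash_*() helpers bridge the asynchronous hash interface
 * to a synchronous algorithm: crypto_hash_walk_*() maps the request's
 * scatterlist one page at a time and each linear chunk is fed to the
 * shash.  They are exported so that drivers with a software fallback
 * can reuse them.
 */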
static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}
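/*
 * Set up an ahash tfm backed by this shash: the tfm context holds a
 * pointer to the underlying crypto_shash, while each request carries
 * its own shash_desc in the request context (hence the reqsize set
 * below).
 */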
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;

	if (alg->setkey) {
		crt->setkey = shash_async_setkey;
		crt->has_setkey = true;
	}
	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc **descp = crypto_hash_ctx(tfm);
	struct shash_desc *desc = *descp;

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

	return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
		struct shash_desc *desc = *descp;
		void *data;

		desc->flags = hdesc->flags;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct shash_desc *desc = *descp;

	crypto_free_shash(desc->tfm);
	kzfree(desc);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;
	struct shash_desc *desc;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
		       GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(shash);
		return -ENOMEM;
	}

	*descp = desc;
	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}
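/*
 * Only the legacy crypto_hash interface reaches this ->init hook; the
 * native shash interface is initialised through crypto_shash_init_tfm()
 * via crypto_shash_type below.
 */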
static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_alg_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
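/*
 * Note that shash_prepare_alg() above fills in defaults before an
 * algorithm is published: finup/digest fall back to update+final,
 * export/import default to a flat copy of the descriptor context (so
 * statesize mirrors descsize), and a missing setkey is replaced by a
 * stub returning -ENOSYS.
 */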
int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");