1 /* 2 * Algorithm testing framework and tests. 3 * 4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> 6 * Copyright (c) 2007 Nokia Siemens Networks 7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> 8 * 9 * Updated RFC4106 AES-GCM testing. 10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com) 11 * Adrian Hoban <adrian.hoban@intel.com> 12 * Gabriele Paoloni <gabriele.paoloni@intel.com> 13 * Tadeusz Struk (tadeusz.struk@intel.com) 14 * Copyright (c) 2010, Intel Corporation. 15 * 16 * This program is free software; you can redistribute it and/or modify it 17 * under the terms of the GNU General Public License as published by the Free 18 * Software Foundation; either version 2 of the License, or (at your option) 19 * any later version. 20 * 21 */ 22 23 #include <crypto/hash.h> 24 #include <linux/err.h> 25 #include <linux/module.h> 26 #include <linux/scatterlist.h> 27 #include <linux/slab.h> 28 #include <linux/string.h> 29 #include <crypto/rng.h> 30 31 #include "internal.h" 32 33 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS 34 35 /* a perfect nop */ 36 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 37 { 38 return 0; 39 } 40 41 #else 42 43 #include "testmgr.h" 44 45 /* 46 * Need slab memory for testing (size in number of pages). 47 */ 48 #define XBUFSIZE 8 49 50 /* 51 * Indexes into the xbuf to simulate cross-page access. 52 */ 53 #define IDX1 32 54 #define IDX2 32400 55 #define IDX3 1 56 #define IDX4 8193 57 #define IDX5 22222 58 #define IDX6 17101 59 #define IDX7 27333 60 #define IDX8 3000 61 62 /* 63 * Used by test_cipher() 64 */ 65 #define ENCRYPT 1 66 #define DECRYPT 0 67 68 struct tcrypt_result { 69 struct completion completion; 70 int err; 71 }; 72 73 struct aead_test_suite { 74 struct { 75 struct aead_testvec *vecs; 76 unsigned int count; 77 } enc, dec; 78 }; 79 80 struct cipher_test_suite { 81 struct { 82 struct cipher_testvec *vecs; 83 unsigned int count; 84 } enc, dec; 85 }; 86 87 struct comp_test_suite { 88 struct { 89 struct comp_testvec *vecs; 90 unsigned int count; 91 } comp, decomp; 92 }; 93 94 struct pcomp_test_suite { 95 struct { 96 struct pcomp_testvec *vecs; 97 unsigned int count; 98 } comp, decomp; 99 }; 100 101 struct hash_test_suite { 102 struct hash_testvec *vecs; 103 unsigned int count; 104 }; 105 106 struct cprng_test_suite { 107 struct cprng_testvec *vecs; 108 unsigned int count; 109 }; 110 111 struct alg_test_desc { 112 const char *alg; 113 int (*test)(const struct alg_test_desc *desc, const char *driver, 114 u32 type, u32 mask); 115 int fips_allowed; /* set if alg is allowed in fips mode */ 116 117 union { 118 struct aead_test_suite aead; 119 struct cipher_test_suite cipher; 120 struct comp_test_suite comp; 121 struct pcomp_test_suite pcomp; 122 struct hash_test_suite hash; 123 struct cprng_test_suite cprng; 124 } suite; 125 }; 126 127 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 }; 128 129 static void hexdump(unsigned char *buf, unsigned int len) 130 { 131 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET, 132 16, 1, 133 buf, len, false); 134 } 135 136 static void tcrypt_complete(struct crypto_async_request *req, int err) 137 { 138 struct tcrypt_result *res = req->data; 139 140 if (err == -EINPROGRESS) 141 return; 142 143 res->err = err; 144 complete(&res->completion); 145 } 146 147 static int testmgr_alloc_buf(char *buf[XBUFSIZE]) 148 { 149 int i; 150 151 for (i = 0; i < XBUFSIZE; i++) { 152 buf[i] = (void 
*)__get_free_page(GFP_KERNEL); 153 if (!buf[i]) 154 goto err_free_buf; 155 } 156 157 return 0; 158 159 err_free_buf: 160 while (i-- > 0) 161 free_page((unsigned long)buf[i]); 162 163 return -ENOMEM; 164 } 165 166 static void testmgr_free_buf(char *buf[XBUFSIZE]) 167 { 168 int i; 169 170 for (i = 0; i < XBUFSIZE; i++) 171 free_page((unsigned long)buf[i]); 172 } 173 174 static int do_one_async_hash_op(struct ahash_request *req, 175 struct tcrypt_result *tr, 176 int ret) 177 { 178 if (ret == -EINPROGRESS || ret == -EBUSY) { 179 ret = wait_for_completion_interruptible(&tr->completion); 180 if (!ret) 181 ret = tr->err; 182 reinit_completion(&tr->completion); 183 } 184 return ret; 185 } 186 187 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 188 unsigned int tcount, bool use_digest, 189 const int align_offset) 190 { 191 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 192 unsigned int i, j, k, temp; 193 struct scatterlist sg[8]; 194 char result[64]; 195 struct ahash_request *req; 196 struct tcrypt_result tresult; 197 void *hash_buff; 198 char *xbuf[XBUFSIZE]; 199 int ret = -ENOMEM; 200 201 if (testmgr_alloc_buf(xbuf)) 202 goto out_nobuf; 203 204 init_completion(&tresult.completion); 205 206 req = ahash_request_alloc(tfm, GFP_KERNEL); 207 if (!req) { 208 printk(KERN_ERR "alg: hash: Failed to allocate request for " 209 "%s\n", algo); 210 goto out_noreq; 211 } 212 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 213 tcrypt_complete, &tresult); 214 215 j = 0; 216 for (i = 0; i < tcount; i++) { 217 if (template[i].np) 218 continue; 219 220 ret = -EINVAL; 221 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) 222 goto out; 223 224 j++; 225 memset(result, 0, 64); 226 227 hash_buff = xbuf[0]; 228 hash_buff += align_offset; 229 230 memcpy(hash_buff, template[i].plaintext, template[i].psize); 231 sg_init_one(&sg[0], hash_buff, template[i].psize); 232 233 if (template[i].ksize) { 234 crypto_ahash_clear_flags(tfm, ~0); 235 ret = crypto_ahash_setkey(tfm, template[i].key, 236 template[i].ksize); 237 if (ret) { 238 printk(KERN_ERR "alg: hash: setkey failed on " 239 "test %d for %s: ret=%d\n", j, algo, 240 -ret); 241 goto out; 242 } 243 } 244 245 ahash_request_set_crypt(req, sg, result, template[i].psize); 246 if (use_digest) { 247 ret = do_one_async_hash_op(req, &tresult, 248 crypto_ahash_digest(req)); 249 if (ret) { 250 pr_err("alg: hash: digest failed on test %d " 251 "for %s: ret=%d\n", j, algo, -ret); 252 goto out; 253 } 254 } else { 255 ret = do_one_async_hash_op(req, &tresult, 256 crypto_ahash_init(req)); 257 if (ret) { 258 pr_err("alt: hash: init failed on test %d " 259 "for %s: ret=%d\n", j, algo, -ret); 260 goto out; 261 } 262 ret = do_one_async_hash_op(req, &tresult, 263 crypto_ahash_update(req)); 264 if (ret) { 265 pr_err("alt: hash: update failed on test %d " 266 "for %s: ret=%d\n", j, algo, -ret); 267 goto out; 268 } 269 ret = do_one_async_hash_op(req, &tresult, 270 crypto_ahash_final(req)); 271 if (ret) { 272 pr_err("alt: hash: final failed on test %d " 273 "for %s: ret=%d\n", j, algo, -ret); 274 goto out; 275 } 276 } 277 278 if (memcmp(result, template[i].digest, 279 crypto_ahash_digestsize(tfm))) { 280 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 281 j, algo); 282 hexdump(result, crypto_ahash_digestsize(tfm)); 283 ret = -EINVAL; 284 goto out; 285 } 286 } 287 288 j = 0; 289 for (i = 0; i < tcount; i++) { 290 /* alignment tests are only done with continuous buffers */ 291 if (align_offset != 0) 292 break; 293 294 if 
(template[i].np) { 295 j++; 296 memset(result, 0, 64); 297 298 temp = 0; 299 sg_init_table(sg, template[i].np); 300 ret = -EINVAL; 301 for (k = 0; k < template[i].np; k++) { 302 if (WARN_ON(offset_in_page(IDX[k]) + 303 template[i].tap[k] > PAGE_SIZE)) 304 goto out; 305 sg_set_buf(&sg[k], 306 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + 307 offset_in_page(IDX[k]), 308 template[i].plaintext + temp, 309 template[i].tap[k]), 310 template[i].tap[k]); 311 temp += template[i].tap[k]; 312 } 313 314 if (template[i].ksize) { 315 crypto_ahash_clear_flags(tfm, ~0); 316 ret = crypto_ahash_setkey(tfm, template[i].key, 317 template[i].ksize); 318 319 if (ret) { 320 printk(KERN_ERR "alg: hash: setkey " 321 "failed on chunking test %d " 322 "for %s: ret=%d\n", j, algo, 323 -ret); 324 goto out; 325 } 326 } 327 328 ahash_request_set_crypt(req, sg, result, 329 template[i].psize); 330 ret = crypto_ahash_digest(req); 331 switch (ret) { 332 case 0: 333 break; 334 case -EINPROGRESS: 335 case -EBUSY: 336 ret = wait_for_completion_interruptible( 337 &tresult.completion); 338 if (!ret && !(ret = tresult.err)) { 339 reinit_completion(&tresult.completion); 340 break; 341 } 342 /* fall through */ 343 default: 344 printk(KERN_ERR "alg: hash: digest failed " 345 "on chunking test %d for %s: " 346 "ret=%d\n", j, algo, -ret); 347 goto out; 348 } 349 350 if (memcmp(result, template[i].digest, 351 crypto_ahash_digestsize(tfm))) { 352 printk(KERN_ERR "alg: hash: Chunking test %d " 353 "failed for %s\n", j, algo); 354 hexdump(result, crypto_ahash_digestsize(tfm)); 355 ret = -EINVAL; 356 goto out; 357 } 358 } 359 } 360 361 ret = 0; 362 363 out: 364 ahash_request_free(req); 365 out_noreq: 366 testmgr_free_buf(xbuf); 367 out_nobuf: 368 return ret; 369 } 370 371 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 372 unsigned int tcount, bool use_digest) 373 { 374 unsigned int alignmask; 375 int ret; 376 377 ret = __test_hash(tfm, template, tcount, use_digest, 0); 378 if (ret) 379 return ret; 380 381 /* test unaligned buffers, check with one byte offset */ 382 ret = __test_hash(tfm, template, tcount, use_digest, 1); 383 if (ret) 384 return ret; 385 386 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 387 if (alignmask) { 388 /* Check if alignment mask for tfm is correctly set. */ 389 ret = __test_hash(tfm, template, tcount, use_digest, 390 alignmask + 1); 391 if (ret) 392 return ret; 393 } 394 395 return 0; 396 } 397 398 static int __test_aead(struct crypto_aead *tfm, int enc, 399 struct aead_testvec *template, unsigned int tcount, 400 const bool diff_dst, const int align_offset) 401 { 402 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 403 unsigned int i, j, k, n, temp; 404 int ret = -ENOMEM; 405 char *q; 406 char *key; 407 struct aead_request *req; 408 struct scatterlist *sg; 409 struct scatterlist *asg; 410 struct scatterlist *sgout; 411 const char *e, *d; 412 struct tcrypt_result result; 413 unsigned int authsize; 414 void *input; 415 void *output; 416 void *assoc; 417 char *iv; 418 char *xbuf[XBUFSIZE]; 419 char *xoutbuf[XBUFSIZE]; 420 char *axbuf[XBUFSIZE]; 421 422 iv = kzalloc(MAX_IVLEN, GFP_KERNEL); 423 if (!iv) 424 return ret; 425 if (testmgr_alloc_buf(xbuf)) 426 goto out_noxbuf; 427 if (testmgr_alloc_buf(axbuf)) 428 goto out_noaxbuf; 429 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 430 goto out_nooutbuf; 431 432 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ 433 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 
3 : 2), GFP_KERNEL); 434 if (!sg) 435 goto out_nosg; 436 asg = &sg[8]; 437 sgout = &asg[8]; 438 439 if (diff_dst) 440 d = "-ddst"; 441 else 442 d = ""; 443 444 if (enc == ENCRYPT) 445 e = "encryption"; 446 else 447 e = "decryption"; 448 449 init_completion(&result.completion); 450 451 req = aead_request_alloc(tfm, GFP_KERNEL); 452 if (!req) { 453 pr_err("alg: aead%s: Failed to allocate request for %s\n", 454 d, algo); 455 goto out; 456 } 457 458 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 459 tcrypt_complete, &result); 460 461 for (i = 0, j = 0; i < tcount; i++) { 462 if (!template[i].np) { 463 j++; 464 465 /* some templates have no input data but they will 466 * touch input 467 */ 468 input = xbuf[0]; 469 input += align_offset; 470 assoc = axbuf[0]; 471 472 ret = -EINVAL; 473 if (WARN_ON(align_offset + template[i].ilen > 474 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 475 goto out; 476 477 memcpy(input, template[i].input, template[i].ilen); 478 memcpy(assoc, template[i].assoc, template[i].alen); 479 if (template[i].iv) 480 memcpy(iv, template[i].iv, MAX_IVLEN); 481 else 482 memset(iv, 0, MAX_IVLEN); 483 484 crypto_aead_clear_flags(tfm, ~0); 485 if (template[i].wk) 486 crypto_aead_set_flags( 487 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 488 489 key = template[i].key; 490 491 ret = crypto_aead_setkey(tfm, key, 492 template[i].klen); 493 if (!ret == template[i].fail) { 494 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", 495 d, j, algo, crypto_aead_get_flags(tfm)); 496 goto out; 497 } else if (ret) 498 continue; 499 500 authsize = abs(template[i].rlen - template[i].ilen); 501 ret = crypto_aead_setauthsize(tfm, authsize); 502 if (ret) { 503 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", 504 d, authsize, j, algo); 505 goto out; 506 } 507 508 if (diff_dst) { 509 output = xoutbuf[0]; 510 output += align_offset; 511 sg_init_one(&sg[0], input, template[i].ilen); 512 sg_init_one(&sgout[0], output, 513 template[i].rlen); 514 } else { 515 sg_init_one(&sg[0], input, 516 template[i].ilen + 517 (enc ? authsize : 0)); 518 output = input; 519 } 520 521 sg_init_one(&asg[0], assoc, template[i].alen); 522 523 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 524 template[i].ilen, iv); 525 526 aead_request_set_assoc(req, asg, template[i].alen); 527 528 ret = enc ? 
529 crypto_aead_encrypt(req) : 530 crypto_aead_decrypt(req); 531 532 switch (ret) { 533 case 0: 534 if (template[i].novrfy) { 535 /* verification was supposed to fail */ 536 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 537 d, e, j, algo); 538 /* so really, we got a bad message */ 539 ret = -EBADMSG; 540 goto out; 541 } 542 break; 543 case -EINPROGRESS: 544 case -EBUSY: 545 ret = wait_for_completion_interruptible( 546 &result.completion); 547 if (!ret && !(ret = result.err)) { 548 reinit_completion(&result.completion); 549 break; 550 } 551 case -EBADMSG: 552 if (template[i].novrfy) 553 /* verification failure was expected */ 554 continue; 555 /* fall through */ 556 default: 557 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", 558 d, e, j, algo, -ret); 559 goto out; 560 } 561 562 q = output; 563 if (memcmp(q, template[i].result, template[i].rlen)) { 564 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 565 d, j, e, algo); 566 hexdump(q, template[i].rlen); 567 ret = -EINVAL; 568 goto out; 569 } 570 } 571 } 572 573 for (i = 0, j = 0; i < tcount; i++) { 574 /* alignment tests are only done with continuous buffers */ 575 if (align_offset != 0) 576 break; 577 578 if (template[i].np) { 579 j++; 580 581 if (template[i].iv) 582 memcpy(iv, template[i].iv, MAX_IVLEN); 583 else 584 memset(iv, 0, MAX_IVLEN); 585 586 crypto_aead_clear_flags(tfm, ~0); 587 if (template[i].wk) 588 crypto_aead_set_flags( 589 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 590 key = template[i].key; 591 592 ret = crypto_aead_setkey(tfm, key, template[i].klen); 593 if (!ret == template[i].fail) { 594 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 595 d, j, algo, crypto_aead_get_flags(tfm)); 596 goto out; 597 } else if (ret) 598 continue; 599 600 authsize = abs(template[i].rlen - template[i].ilen); 601 602 ret = -EINVAL; 603 sg_init_table(sg, template[i].np); 604 if (diff_dst) 605 sg_init_table(sgout, template[i].np); 606 for (k = 0, temp = 0; k < template[i].np; k++) { 607 if (WARN_ON(offset_in_page(IDX[k]) + 608 template[i].tap[k] > PAGE_SIZE)) 609 goto out; 610 611 q = xbuf[IDX[k] >> PAGE_SHIFT] + 612 offset_in_page(IDX[k]); 613 614 memcpy(q, template[i].input + temp, 615 template[i].tap[k]); 616 617 sg_set_buf(&sg[k], q, template[i].tap[k]); 618 619 if (diff_dst) { 620 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 621 offset_in_page(IDX[k]); 622 623 memset(q, 0, template[i].tap[k]); 624 625 sg_set_buf(&sgout[k], q, 626 template[i].tap[k]); 627 } 628 629 n = template[i].tap[k]; 630 if (k == template[i].np - 1 && enc) 631 n += authsize; 632 if (offset_in_page(q) + n < PAGE_SIZE) 633 q[n] = 0; 634 635 temp += template[i].tap[k]; 636 } 637 638 ret = crypto_aead_setauthsize(tfm, authsize); 639 if (ret) { 640 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 641 d, authsize, j, algo); 642 goto out; 643 } 644 645 if (enc) { 646 if (WARN_ON(sg[k - 1].offset + 647 sg[k - 1].length + authsize > 648 PAGE_SIZE)) { 649 ret = -EINVAL; 650 goto out; 651 } 652 653 if (diff_dst) 654 sgout[k - 1].length += authsize; 655 else 656 sg[k - 1].length += authsize; 657 } 658 659 sg_init_table(asg, template[i].anp); 660 ret = -EINVAL; 661 for (k = 0, temp = 0; k < template[i].anp; k++) { 662 if (WARN_ON(offset_in_page(IDX[k]) + 663 template[i].atap[k] > PAGE_SIZE)) 664 goto out; 665 sg_set_buf(&asg[k], 666 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 667 offset_in_page(IDX[k]), 668 template[i].assoc + temp, 669 template[i].atap[k]), 670 template[i].atap[k]); 671 temp += 
template[i].atap[k]; 672 } 673 674 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 675 template[i].ilen, 676 iv); 677 678 aead_request_set_assoc(req, asg, template[i].alen); 679 680 ret = enc ? 681 crypto_aead_encrypt(req) : 682 crypto_aead_decrypt(req); 683 684 switch (ret) { 685 case 0: 686 if (template[i].novrfy) { 687 /* verification was supposed to fail */ 688 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 689 d, e, j, algo); 690 /* so really, we got a bad message */ 691 ret = -EBADMSG; 692 goto out; 693 } 694 break; 695 case -EINPROGRESS: 696 case -EBUSY: 697 ret = wait_for_completion_interruptible( 698 &result.completion); 699 if (!ret && !(ret = result.err)) { 700 reinit_completion(&result.completion); 701 break; 702 } 703 case -EBADMSG: 704 if (template[i].novrfy) 705 /* verification failure was expected */ 706 continue; 707 /* fall through */ 708 default: 709 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", 710 d, e, j, algo, -ret); 711 goto out; 712 } 713 714 ret = -EINVAL; 715 for (k = 0, temp = 0; k < template[i].np; k++) { 716 if (diff_dst) 717 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 718 offset_in_page(IDX[k]); 719 else 720 q = xbuf[IDX[k] >> PAGE_SHIFT] + 721 offset_in_page(IDX[k]); 722 723 n = template[i].tap[k]; 724 if (k == template[i].np - 1) 725 n += enc ? authsize : -authsize; 726 727 if (memcmp(q, template[i].result + temp, n)) { 728 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 729 d, j, e, k, algo); 730 hexdump(q, n); 731 goto out; 732 } 733 734 q += n; 735 if (k == template[i].np - 1 && !enc) { 736 if (!diff_dst && 737 memcmp(q, template[i].input + 738 temp + n, authsize)) 739 n = authsize; 740 else 741 n = 0; 742 } else { 743 for (n = 0; offset_in_page(q + n) && 744 q[n]; n++) 745 ; 746 } 747 if (n) { 748 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 749 d, j, e, k, algo, n); 750 hexdump(q, n); 751 goto out; 752 } 753 754 temp += template[i].tap[k]; 755 } 756 } 757 } 758 759 ret = 0; 760 761 out: 762 aead_request_free(req); 763 kfree(sg); 764 out_nosg: 765 if (diff_dst) 766 testmgr_free_buf(xoutbuf); 767 out_nooutbuf: 768 testmgr_free_buf(axbuf); 769 out_noaxbuf: 770 testmgr_free_buf(xbuf); 771 out_noxbuf: 772 kfree(iv); 773 return ret; 774 } 775 776 static int test_aead(struct crypto_aead *tfm, int enc, 777 struct aead_testvec *template, unsigned int tcount) 778 { 779 unsigned int alignmask; 780 int ret; 781 782 /* test 'dst == src' case */ 783 ret = __test_aead(tfm, enc, template, tcount, false, 0); 784 if (ret) 785 return ret; 786 787 /* test 'dst != src' case */ 788 ret = __test_aead(tfm, enc, template, tcount, true, 0); 789 if (ret) 790 return ret; 791 792 /* test unaligned buffers, check with one byte offset */ 793 ret = __test_aead(tfm, enc, template, tcount, true, 1); 794 if (ret) 795 return ret; 796 797 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 798 if (alignmask) { 799 /* Check if alignment mask for tfm is correctly set. 
*/ 800 ret = __test_aead(tfm, enc, template, tcount, true, 801 alignmask + 1); 802 if (ret) 803 return ret; 804 } 805 806 return 0; 807 } 808 809 static int test_cipher(struct crypto_cipher *tfm, int enc, 810 struct cipher_testvec *template, unsigned int tcount) 811 { 812 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 813 unsigned int i, j, k; 814 char *q; 815 const char *e; 816 void *data; 817 char *xbuf[XBUFSIZE]; 818 int ret = -ENOMEM; 819 820 if (testmgr_alloc_buf(xbuf)) 821 goto out_nobuf; 822 823 if (enc == ENCRYPT) 824 e = "encryption"; 825 else 826 e = "decryption"; 827 828 j = 0; 829 for (i = 0; i < tcount; i++) { 830 if (template[i].np) 831 continue; 832 833 j++; 834 835 ret = -EINVAL; 836 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 837 goto out; 838 839 data = xbuf[0]; 840 memcpy(data, template[i].input, template[i].ilen); 841 842 crypto_cipher_clear_flags(tfm, ~0); 843 if (template[i].wk) 844 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 845 846 ret = crypto_cipher_setkey(tfm, template[i].key, 847 template[i].klen); 848 if (!ret == template[i].fail) { 849 printk(KERN_ERR "alg: cipher: setkey failed " 850 "on test %d for %s: flags=%x\n", j, 851 algo, crypto_cipher_get_flags(tfm)); 852 goto out; 853 } else if (ret) 854 continue; 855 856 for (k = 0; k < template[i].ilen; 857 k += crypto_cipher_blocksize(tfm)) { 858 if (enc) 859 crypto_cipher_encrypt_one(tfm, data + k, 860 data + k); 861 else 862 crypto_cipher_decrypt_one(tfm, data + k, 863 data + k); 864 } 865 866 q = data; 867 if (memcmp(q, template[i].result, template[i].rlen)) { 868 printk(KERN_ERR "alg: cipher: Test %d failed " 869 "on %s for %s\n", j, e, algo); 870 hexdump(q, template[i].rlen); 871 ret = -EINVAL; 872 goto out; 873 } 874 } 875 876 ret = 0; 877 878 out: 879 testmgr_free_buf(xbuf); 880 out_nobuf: 881 return ret; 882 } 883 884 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc, 885 struct cipher_testvec *template, unsigned int tcount, 886 const bool diff_dst, const int align_offset) 887 { 888 const char *algo = 889 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm)); 890 unsigned int i, j, k, n, temp; 891 char *q; 892 struct ablkcipher_request *req; 893 struct scatterlist sg[8]; 894 struct scatterlist sgout[8]; 895 const char *e, *d; 896 struct tcrypt_result result; 897 void *data; 898 char iv[MAX_IVLEN]; 899 char *xbuf[XBUFSIZE]; 900 char *xoutbuf[XBUFSIZE]; 901 int ret = -ENOMEM; 902 903 if (testmgr_alloc_buf(xbuf)) 904 goto out_nobuf; 905 906 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 907 goto out_nooutbuf; 908 909 if (diff_dst) 910 d = "-ddst"; 911 else 912 d = ""; 913 914 if (enc == ENCRYPT) 915 e = "encryption"; 916 else 917 e = "decryption"; 918 919 init_completion(&result.completion); 920 921 req = ablkcipher_request_alloc(tfm, GFP_KERNEL); 922 if (!req) { 923 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 924 d, algo); 925 goto out; 926 } 927 928 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 929 tcrypt_complete, &result); 930 931 j = 0; 932 for (i = 0; i < tcount; i++) { 933 if (template[i].iv) 934 memcpy(iv, template[i].iv, MAX_IVLEN); 935 else 936 memset(iv, 0, MAX_IVLEN); 937 938 if (!(template[i].np) || (template[i].also_non_np)) { 939 j++; 940 941 ret = -EINVAL; 942 if (WARN_ON(align_offset + template[i].ilen > 943 PAGE_SIZE)) 944 goto out; 945 946 data = xbuf[0]; 947 data += align_offset; 948 memcpy(data, template[i].input, template[i].ilen); 949 950 crypto_ablkcipher_clear_flags(tfm, ~0); 951 if (template[i].wk) 
952 crypto_ablkcipher_set_flags( 953 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 954 955 ret = crypto_ablkcipher_setkey(tfm, template[i].key, 956 template[i].klen); 957 if (!ret == template[i].fail) { 958 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 959 d, j, algo, 960 crypto_ablkcipher_get_flags(tfm)); 961 goto out; 962 } else if (ret) 963 continue; 964 965 sg_init_one(&sg[0], data, template[i].ilen); 966 if (diff_dst) { 967 data = xoutbuf[0]; 968 data += align_offset; 969 sg_init_one(&sgout[0], data, template[i].ilen); 970 } 971 972 ablkcipher_request_set_crypt(req, sg, 973 (diff_dst) ? sgout : sg, 974 template[i].ilen, iv); 975 ret = enc ? 976 crypto_ablkcipher_encrypt(req) : 977 crypto_ablkcipher_decrypt(req); 978 979 switch (ret) { 980 case 0: 981 break; 982 case -EINPROGRESS: 983 case -EBUSY: 984 ret = wait_for_completion_interruptible( 985 &result.completion); 986 if (!ret && !((ret = result.err))) { 987 reinit_completion(&result.completion); 988 break; 989 } 990 /* fall through */ 991 default: 992 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 993 d, e, j, algo, -ret); 994 goto out; 995 } 996 997 q = data; 998 if (memcmp(q, template[i].result, template[i].rlen)) { 999 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n", 1000 d, j, e, algo); 1001 hexdump(q, template[i].rlen); 1002 ret = -EINVAL; 1003 goto out; 1004 } 1005 } 1006 } 1007 1008 j = 0; 1009 for (i = 0; i < tcount; i++) { 1010 /* alignment tests are only done with continuous buffers */ 1011 if (align_offset != 0) 1012 break; 1013 1014 if (template[i].iv) 1015 memcpy(iv, template[i].iv, MAX_IVLEN); 1016 else 1017 memset(iv, 0, MAX_IVLEN); 1018 1019 if (template[i].np) { 1020 j++; 1021 1022 crypto_ablkcipher_clear_flags(tfm, ~0); 1023 if (template[i].wk) 1024 crypto_ablkcipher_set_flags( 1025 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 1026 1027 ret = crypto_ablkcipher_setkey(tfm, template[i].key, 1028 template[i].klen); 1029 if (!ret == template[i].fail) { 1030 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 1031 d, j, algo, 1032 crypto_ablkcipher_get_flags(tfm)); 1033 goto out; 1034 } else if (ret) 1035 continue; 1036 1037 temp = 0; 1038 ret = -EINVAL; 1039 sg_init_table(sg, template[i].np); 1040 if (diff_dst) 1041 sg_init_table(sgout, template[i].np); 1042 for (k = 0; k < template[i].np; k++) { 1043 if (WARN_ON(offset_in_page(IDX[k]) + 1044 template[i].tap[k] > PAGE_SIZE)) 1045 goto out; 1046 1047 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1048 offset_in_page(IDX[k]); 1049 1050 memcpy(q, template[i].input + temp, 1051 template[i].tap[k]); 1052 1053 if (offset_in_page(q) + template[i].tap[k] < 1054 PAGE_SIZE) 1055 q[template[i].tap[k]] = 0; 1056 1057 sg_set_buf(&sg[k], q, template[i].tap[k]); 1058 if (diff_dst) { 1059 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1060 offset_in_page(IDX[k]); 1061 1062 sg_set_buf(&sgout[k], q, 1063 template[i].tap[k]); 1064 1065 memset(q, 0, template[i].tap[k]); 1066 if (offset_in_page(q) + 1067 template[i].tap[k] < PAGE_SIZE) 1068 q[template[i].tap[k]] = 0; 1069 } 1070 1071 temp += template[i].tap[k]; 1072 } 1073 1074 ablkcipher_request_set_crypt(req, sg, 1075 (diff_dst) ? sgout : sg, 1076 template[i].ilen, iv); 1077 1078 ret = enc ? 
1079 crypto_ablkcipher_encrypt(req) : 1080 crypto_ablkcipher_decrypt(req); 1081 1082 switch (ret) { 1083 case 0: 1084 break; 1085 case -EINPROGRESS: 1086 case -EBUSY: 1087 ret = wait_for_completion_interruptible( 1088 &result.completion); 1089 if (!ret && !((ret = result.err))) { 1090 reinit_completion(&result.completion); 1091 break; 1092 } 1093 /* fall through */ 1094 default: 1095 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 1096 d, e, j, algo, -ret); 1097 goto out; 1098 } 1099 1100 temp = 0; 1101 ret = -EINVAL; 1102 for (k = 0; k < template[i].np; k++) { 1103 if (diff_dst) 1104 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1105 offset_in_page(IDX[k]); 1106 else 1107 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1108 offset_in_page(IDX[k]); 1109 1110 if (memcmp(q, template[i].result + temp, 1111 template[i].tap[k])) { 1112 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 1113 d, j, e, k, algo); 1114 hexdump(q, template[i].tap[k]); 1115 goto out; 1116 } 1117 1118 q += template[i].tap[k]; 1119 for (n = 0; offset_in_page(q + n) && q[n]; n++) 1120 ; 1121 if (n) { 1122 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 1123 d, j, e, k, algo, n); 1124 hexdump(q, n); 1125 goto out; 1126 } 1127 temp += template[i].tap[k]; 1128 } 1129 } 1130 } 1131 1132 ret = 0; 1133 1134 out: 1135 ablkcipher_request_free(req); 1136 if (diff_dst) 1137 testmgr_free_buf(xoutbuf); 1138 out_nooutbuf: 1139 testmgr_free_buf(xbuf); 1140 out_nobuf: 1141 return ret; 1142 } 1143 1144 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, 1145 struct cipher_testvec *template, unsigned int tcount) 1146 { 1147 unsigned int alignmask; 1148 int ret; 1149 1150 /* test 'dst == src' case */ 1151 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 1152 if (ret) 1153 return ret; 1154 1155 /* test 'dst != src' case */ 1156 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 1157 if (ret) 1158 return ret; 1159 1160 /* test unaligned buffers, check with one byte offset */ 1161 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 1162 if (ret) 1163 return ret; 1164 1165 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1166 if (alignmask) { 1167 /* Check if alignment mask for tfm is correctly set. 
*/ 1168 ret = __test_skcipher(tfm, enc, template, tcount, true, 1169 alignmask + 1); 1170 if (ret) 1171 return ret; 1172 } 1173 1174 return 0; 1175 } 1176 1177 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, 1178 struct comp_testvec *dtemplate, int ctcount, int dtcount) 1179 { 1180 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 1181 unsigned int i; 1182 char result[COMP_BUF_SIZE]; 1183 int ret; 1184 1185 for (i = 0; i < ctcount; i++) { 1186 int ilen; 1187 unsigned int dlen = COMP_BUF_SIZE; 1188 1189 memset(result, 0, sizeof (result)); 1190 1191 ilen = ctemplate[i].inlen; 1192 ret = crypto_comp_compress(tfm, ctemplate[i].input, 1193 ilen, result, &dlen); 1194 if (ret) { 1195 printk(KERN_ERR "alg: comp: compression failed " 1196 "on test %d for %s: ret=%d\n", i + 1, algo, 1197 -ret); 1198 goto out; 1199 } 1200 1201 if (dlen != ctemplate[i].outlen) { 1202 printk(KERN_ERR "alg: comp: Compression test %d " 1203 "failed for %s: output len = %d\n", i + 1, algo, 1204 dlen); 1205 ret = -EINVAL; 1206 goto out; 1207 } 1208 1209 if (memcmp(result, ctemplate[i].output, dlen)) { 1210 printk(KERN_ERR "alg: comp: Compression test %d " 1211 "failed for %s\n", i + 1, algo); 1212 hexdump(result, dlen); 1213 ret = -EINVAL; 1214 goto out; 1215 } 1216 } 1217 1218 for (i = 0; i < dtcount; i++) { 1219 int ilen; 1220 unsigned int dlen = COMP_BUF_SIZE; 1221 1222 memset(result, 0, sizeof (result)); 1223 1224 ilen = dtemplate[i].inlen; 1225 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 1226 ilen, result, &dlen); 1227 if (ret) { 1228 printk(KERN_ERR "alg: comp: decompression failed " 1229 "on test %d for %s: ret=%d\n", i + 1, algo, 1230 -ret); 1231 goto out; 1232 } 1233 1234 if (dlen != dtemplate[i].outlen) { 1235 printk(KERN_ERR "alg: comp: Decompression test %d " 1236 "failed for %s: output len = %d\n", i + 1, algo, 1237 dlen); 1238 ret = -EINVAL; 1239 goto out; 1240 } 1241 1242 if (memcmp(result, dtemplate[i].output, dlen)) { 1243 printk(KERN_ERR "alg: comp: Decompression test %d " 1244 "failed for %s\n", i + 1, algo); 1245 hexdump(result, dlen); 1246 ret = -EINVAL; 1247 goto out; 1248 } 1249 } 1250 1251 ret = 0; 1252 1253 out: 1254 return ret; 1255 } 1256 1257 static int test_pcomp(struct crypto_pcomp *tfm, 1258 struct pcomp_testvec *ctemplate, 1259 struct pcomp_testvec *dtemplate, int ctcount, 1260 int dtcount) 1261 { 1262 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm)); 1263 unsigned int i; 1264 char result[COMP_BUF_SIZE]; 1265 int res; 1266 1267 for (i = 0; i < ctcount; i++) { 1268 struct comp_request req; 1269 unsigned int produced = 0; 1270 1271 res = crypto_compress_setup(tfm, ctemplate[i].params, 1272 ctemplate[i].paramsize); 1273 if (res) { 1274 pr_err("alg: pcomp: compression setup failed on test " 1275 "%d for %s: error=%d\n", i + 1, algo, res); 1276 return res; 1277 } 1278 1279 res = crypto_compress_init(tfm); 1280 if (res) { 1281 pr_err("alg: pcomp: compression init failed on test " 1282 "%d for %s: error=%d\n", i + 1, algo, res); 1283 return res; 1284 } 1285 1286 memset(result, 0, sizeof(result)); 1287 1288 req.next_in = ctemplate[i].input; 1289 req.avail_in = ctemplate[i].inlen / 2; 1290 req.next_out = result; 1291 req.avail_out = ctemplate[i].outlen / 2; 1292 1293 res = crypto_compress_update(tfm, &req); 1294 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1295 pr_err("alg: pcomp: compression update failed on test " 1296 "%d for %s: error=%d\n", i + 1, algo, res); 1297 return res; 1298 } 1299 if (res > 0) 1300 
produced += res; 1301 1302 /* Add remaining input data */ 1303 req.avail_in += (ctemplate[i].inlen + 1) / 2; 1304 1305 res = crypto_compress_update(tfm, &req); 1306 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1307 pr_err("alg: pcomp: compression update failed on test " 1308 "%d for %s: error=%d\n", i + 1, algo, res); 1309 return res; 1310 } 1311 if (res > 0) 1312 produced += res; 1313 1314 /* Provide remaining output space */ 1315 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2; 1316 1317 res = crypto_compress_final(tfm, &req); 1318 if (res < 0) { 1319 pr_err("alg: pcomp: compression final failed on test " 1320 "%d for %s: error=%d\n", i + 1, algo, res); 1321 return res; 1322 } 1323 produced += res; 1324 1325 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) { 1326 pr_err("alg: comp: Compression test %d failed for %s: " 1327 "output len = %d (expected %d)\n", i + 1, algo, 1328 COMP_BUF_SIZE - req.avail_out, 1329 ctemplate[i].outlen); 1330 return -EINVAL; 1331 } 1332 1333 if (produced != ctemplate[i].outlen) { 1334 pr_err("alg: comp: Compression test %d failed for %s: " 1335 "returned len = %u (expected %d)\n", i + 1, 1336 algo, produced, ctemplate[i].outlen); 1337 return -EINVAL; 1338 } 1339 1340 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) { 1341 pr_err("alg: pcomp: Compression test %d failed for " 1342 "%s\n", i + 1, algo); 1343 hexdump(result, ctemplate[i].outlen); 1344 return -EINVAL; 1345 } 1346 } 1347 1348 for (i = 0; i < dtcount; i++) { 1349 struct comp_request req; 1350 unsigned int produced = 0; 1351 1352 res = crypto_decompress_setup(tfm, dtemplate[i].params, 1353 dtemplate[i].paramsize); 1354 if (res) { 1355 pr_err("alg: pcomp: decompression setup failed on " 1356 "test %d for %s: error=%d\n", i + 1, algo, res); 1357 return res; 1358 } 1359 1360 res = crypto_decompress_init(tfm); 1361 if (res) { 1362 pr_err("alg: pcomp: decompression init failed on test " 1363 "%d for %s: error=%d\n", i + 1, algo, res); 1364 return res; 1365 } 1366 1367 memset(result, 0, sizeof(result)); 1368 1369 req.next_in = dtemplate[i].input; 1370 req.avail_in = dtemplate[i].inlen / 2; 1371 req.next_out = result; 1372 req.avail_out = dtemplate[i].outlen / 2; 1373 1374 res = crypto_decompress_update(tfm, &req); 1375 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1376 pr_err("alg: pcomp: decompression update failed on " 1377 "test %d for %s: error=%d\n", i + 1, algo, res); 1378 return res; 1379 } 1380 if (res > 0) 1381 produced += res; 1382 1383 /* Add remaining input data */ 1384 req.avail_in += (dtemplate[i].inlen + 1) / 2; 1385 1386 res = crypto_decompress_update(tfm, &req); 1387 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1388 pr_err("alg: pcomp: decompression update failed on " 1389 "test %d for %s: error=%d\n", i + 1, algo, res); 1390 return res; 1391 } 1392 if (res > 0) 1393 produced += res; 1394 1395 /* Provide remaining output space */ 1396 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2; 1397 1398 res = crypto_decompress_final(tfm, &req); 1399 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1400 pr_err("alg: pcomp: decompression final failed on " 1401 "test %d for %s: error=%d\n", i + 1, algo, res); 1402 return res; 1403 } 1404 if (res > 0) 1405 produced += res; 1406 1407 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) { 1408 pr_err("alg: comp: Decompression test %d failed for " 1409 "%s: output len = %d (expected %d)\n", i + 1, 1410 algo, COMP_BUF_SIZE - req.avail_out, 1411 dtemplate[i].outlen); 1412 return -EINVAL; 1413 } 
1414 1415 if (produced != dtemplate[i].outlen) { 1416 pr_err("alg: comp: Decompression test %d failed for " 1417 "%s: returned len = %u (expected %d)\n", i + 1, 1418 algo, produced, dtemplate[i].outlen); 1419 return -EINVAL; 1420 } 1421 1422 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) { 1423 pr_err("alg: pcomp: Decompression test %d failed for " 1424 "%s\n", i + 1, algo); 1425 hexdump(result, dtemplate[i].outlen); 1426 return -EINVAL; 1427 } 1428 } 1429 1430 return 0; 1431 } 1432 1433 1434 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template, 1435 unsigned int tcount) 1436 { 1437 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1438 int err = 0, i, j, seedsize; 1439 u8 *seed; 1440 char result[32]; 1441 1442 seedsize = crypto_rng_seedsize(tfm); 1443 1444 seed = kmalloc(seedsize, GFP_KERNEL); 1445 if (!seed) { 1446 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1447 "for %s\n", algo); 1448 return -ENOMEM; 1449 } 1450 1451 for (i = 0; i < tcount; i++) { 1452 memset(result, 0, 32); 1453 1454 memcpy(seed, template[i].v, template[i].vlen); 1455 memcpy(seed + template[i].vlen, template[i].key, 1456 template[i].klen); 1457 memcpy(seed + template[i].vlen + template[i].klen, 1458 template[i].dt, template[i].dtlen); 1459 1460 err = crypto_rng_reset(tfm, seed, seedsize); 1461 if (err) { 1462 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1463 "for %s\n", algo); 1464 goto out; 1465 } 1466 1467 for (j = 0; j < template[i].loops; j++) { 1468 err = crypto_rng_get_bytes(tfm, result, 1469 template[i].rlen); 1470 if (err != template[i].rlen) { 1471 printk(KERN_ERR "alg: cprng: Failed to obtain " 1472 "the correct amount of random data for " 1473 "%s (requested %d, got %d)\n", algo, 1474 template[i].rlen, err); 1475 goto out; 1476 } 1477 } 1478 1479 err = memcmp(result, template[i].result, 1480 template[i].rlen); 1481 if (err) { 1482 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1483 i, algo); 1484 hexdump(result, template[i].rlen); 1485 err = -EINVAL; 1486 goto out; 1487 } 1488 } 1489 1490 out: 1491 kfree(seed); 1492 return err; 1493 } 1494 1495 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1496 u32 type, u32 mask) 1497 { 1498 struct crypto_aead *tfm; 1499 int err = 0; 1500 1501 tfm = crypto_alloc_aead(driver, type, mask); 1502 if (IS_ERR(tfm)) { 1503 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1504 "%ld\n", driver, PTR_ERR(tfm)); 1505 return PTR_ERR(tfm); 1506 } 1507 1508 if (desc->suite.aead.enc.vecs) { 1509 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1510 desc->suite.aead.enc.count); 1511 if (err) 1512 goto out; 1513 } 1514 1515 if (!err && desc->suite.aead.dec.vecs) 1516 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1517 desc->suite.aead.dec.count); 1518 1519 out: 1520 crypto_free_aead(tfm); 1521 return err; 1522 } 1523 1524 static int alg_test_cipher(const struct alg_test_desc *desc, 1525 const char *driver, u32 type, u32 mask) 1526 { 1527 struct crypto_cipher *tfm; 1528 int err = 0; 1529 1530 tfm = crypto_alloc_cipher(driver, type, mask); 1531 if (IS_ERR(tfm)) { 1532 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1533 "%s: %ld\n", driver, PTR_ERR(tfm)); 1534 return PTR_ERR(tfm); 1535 } 1536 1537 if (desc->suite.cipher.enc.vecs) { 1538 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1539 desc->suite.cipher.enc.count); 1540 if (err) 1541 goto out; 1542 } 1543 1544 if (desc->suite.cipher.dec.vecs) 1545 err = 
test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1546 desc->suite.cipher.dec.count); 1547 1548 out: 1549 crypto_free_cipher(tfm); 1550 return err; 1551 } 1552 1553 static int alg_test_skcipher(const struct alg_test_desc *desc, 1554 const char *driver, u32 type, u32 mask) 1555 { 1556 struct crypto_ablkcipher *tfm; 1557 int err = 0; 1558 1559 tfm = crypto_alloc_ablkcipher(driver, type, mask); 1560 if (IS_ERR(tfm)) { 1561 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1562 "%s: %ld\n", driver, PTR_ERR(tfm)); 1563 return PTR_ERR(tfm); 1564 } 1565 1566 if (desc->suite.cipher.enc.vecs) { 1567 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1568 desc->suite.cipher.enc.count); 1569 if (err) 1570 goto out; 1571 } 1572 1573 if (desc->suite.cipher.dec.vecs) 1574 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1575 desc->suite.cipher.dec.count); 1576 1577 out: 1578 crypto_free_ablkcipher(tfm); 1579 return err; 1580 } 1581 1582 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1583 u32 type, u32 mask) 1584 { 1585 struct crypto_comp *tfm; 1586 int err; 1587 1588 tfm = crypto_alloc_comp(driver, type, mask); 1589 if (IS_ERR(tfm)) { 1590 printk(KERN_ERR "alg: comp: Failed to load transform for %s: " 1591 "%ld\n", driver, PTR_ERR(tfm)); 1592 return PTR_ERR(tfm); 1593 } 1594 1595 err = test_comp(tfm, desc->suite.comp.comp.vecs, 1596 desc->suite.comp.decomp.vecs, 1597 desc->suite.comp.comp.count, 1598 desc->suite.comp.decomp.count); 1599 1600 crypto_free_comp(tfm); 1601 return err; 1602 } 1603 1604 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver, 1605 u32 type, u32 mask) 1606 { 1607 struct crypto_pcomp *tfm; 1608 int err; 1609 1610 tfm = crypto_alloc_pcomp(driver, type, mask); 1611 if (IS_ERR(tfm)) { 1612 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n", 1613 driver, PTR_ERR(tfm)); 1614 return PTR_ERR(tfm); 1615 } 1616 1617 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs, 1618 desc->suite.pcomp.decomp.vecs, 1619 desc->suite.pcomp.comp.count, 1620 desc->suite.pcomp.decomp.count); 1621 1622 crypto_free_pcomp(tfm); 1623 return err; 1624 } 1625 1626 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1627 u32 type, u32 mask) 1628 { 1629 struct crypto_ahash *tfm; 1630 int err; 1631 1632 tfm = crypto_alloc_ahash(driver, type, mask); 1633 if (IS_ERR(tfm)) { 1634 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1635 "%ld\n", driver, PTR_ERR(tfm)); 1636 return PTR_ERR(tfm); 1637 } 1638 1639 err = test_hash(tfm, desc->suite.hash.vecs, 1640 desc->suite.hash.count, true); 1641 if (!err) 1642 err = test_hash(tfm, desc->suite.hash.vecs, 1643 desc->suite.hash.count, false); 1644 1645 crypto_free_ahash(tfm); 1646 return err; 1647 } 1648 1649 static int alg_test_crc32c(const struct alg_test_desc *desc, 1650 const char *driver, u32 type, u32 mask) 1651 { 1652 struct crypto_shash *tfm; 1653 u32 val; 1654 int err; 1655 1656 err = alg_test_hash(desc, driver, type, mask); 1657 if (err) 1658 goto out; 1659 1660 tfm = crypto_alloc_shash(driver, type, mask); 1661 if (IS_ERR(tfm)) { 1662 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1663 "%ld\n", driver, PTR_ERR(tfm)); 1664 err = PTR_ERR(tfm); 1665 goto out; 1666 } 1667 1668 do { 1669 struct { 1670 struct shash_desc shash; 1671 char ctx[crypto_shash_descsize(tfm)]; 1672 } sdesc; 1673 1674 sdesc.shash.tfm = tfm; 1675 sdesc.shash.flags = 0; 1676 1677 *(u32 *)sdesc.ctx = le32_to_cpu(420553207); 1678 err = 
crypto_shash_final(&sdesc.shash, (u8 *)&val); 1679 if (err) { 1680 printk(KERN_ERR "alg: crc32c: Operation failed for " 1681 "%s: %d\n", driver, err); 1682 break; 1683 } 1684 1685 if (val != ~420553207) { 1686 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1687 "%d\n", driver, val); 1688 err = -EINVAL; 1689 } 1690 } while (0); 1691 1692 crypto_free_shash(tfm); 1693 1694 out: 1695 return err; 1696 } 1697 1698 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1699 u32 type, u32 mask) 1700 { 1701 struct crypto_rng *rng; 1702 int err; 1703 1704 rng = crypto_alloc_rng(driver, type, mask); 1705 if (IS_ERR(rng)) { 1706 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1707 "%ld\n", driver, PTR_ERR(rng)); 1708 return PTR_ERR(rng); 1709 } 1710 1711 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1712 1713 crypto_free_rng(rng); 1714 1715 return err; 1716 } 1717 1718 static int alg_test_null(const struct alg_test_desc *desc, 1719 const char *driver, u32 type, u32 mask) 1720 { 1721 return 0; 1722 } 1723 1724 /* Please keep this list sorted by algorithm name. */ 1725 static const struct alg_test_desc alg_test_descs[] = { 1726 { 1727 .alg = "__cbc-cast5-avx", 1728 .test = alg_test_null, 1729 }, { 1730 .alg = "__cbc-cast6-avx", 1731 .test = alg_test_null, 1732 }, { 1733 .alg = "__cbc-serpent-avx", 1734 .test = alg_test_null, 1735 }, { 1736 .alg = "__cbc-serpent-avx2", 1737 .test = alg_test_null, 1738 }, { 1739 .alg = "__cbc-serpent-sse2", 1740 .test = alg_test_null, 1741 }, { 1742 .alg = "__cbc-twofish-avx", 1743 .test = alg_test_null, 1744 }, { 1745 .alg = "__driver-cbc-aes-aesni", 1746 .test = alg_test_null, 1747 .fips_allowed = 1, 1748 }, { 1749 .alg = "__driver-cbc-camellia-aesni", 1750 .test = alg_test_null, 1751 }, { 1752 .alg = "__driver-cbc-camellia-aesni-avx2", 1753 .test = alg_test_null, 1754 }, { 1755 .alg = "__driver-cbc-cast5-avx", 1756 .test = alg_test_null, 1757 }, { 1758 .alg = "__driver-cbc-cast6-avx", 1759 .test = alg_test_null, 1760 }, { 1761 .alg = "__driver-cbc-serpent-avx", 1762 .test = alg_test_null, 1763 }, { 1764 .alg = "__driver-cbc-serpent-avx2", 1765 .test = alg_test_null, 1766 }, { 1767 .alg = "__driver-cbc-serpent-sse2", 1768 .test = alg_test_null, 1769 }, { 1770 .alg = "__driver-cbc-twofish-avx", 1771 .test = alg_test_null, 1772 }, { 1773 .alg = "__driver-ecb-aes-aesni", 1774 .test = alg_test_null, 1775 .fips_allowed = 1, 1776 }, { 1777 .alg = "__driver-ecb-camellia-aesni", 1778 .test = alg_test_null, 1779 }, { 1780 .alg = "__driver-ecb-camellia-aesni-avx2", 1781 .test = alg_test_null, 1782 }, { 1783 .alg = "__driver-ecb-cast5-avx", 1784 .test = alg_test_null, 1785 }, { 1786 .alg = "__driver-ecb-cast6-avx", 1787 .test = alg_test_null, 1788 }, { 1789 .alg = "__driver-ecb-serpent-avx", 1790 .test = alg_test_null, 1791 }, { 1792 .alg = "__driver-ecb-serpent-avx2", 1793 .test = alg_test_null, 1794 }, { 1795 .alg = "__driver-ecb-serpent-sse2", 1796 .test = alg_test_null, 1797 }, { 1798 .alg = "__driver-ecb-twofish-avx", 1799 .test = alg_test_null, 1800 }, { 1801 .alg = "__ghash-pclmulqdqni", 1802 .test = alg_test_null, 1803 .fips_allowed = 1, 1804 }, { 1805 .alg = "ansi_cprng", 1806 .test = alg_test_cprng, 1807 .fips_allowed = 1, 1808 .suite = { 1809 .cprng = { 1810 .vecs = ansi_cprng_aes_tv_template, 1811 .count = ANSI_CPRNG_AES_TEST_VECTORS 1812 } 1813 } 1814 }, { 1815 .alg = "authenc(hmac(md5),ecb(cipher_null))", 1816 .test = alg_test_aead, 1817 .fips_allowed = 1, 1818 .suite = { 1819 .aead = { 1820 .enc 
= { 1821 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template, 1822 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS 1823 }, 1824 .dec = { 1825 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template, 1826 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS 1827 } 1828 } 1829 } 1830 }, { 1831 .alg = "authenc(hmac(sha1),cbc(aes))", 1832 .test = alg_test_aead, 1833 .fips_allowed = 1, 1834 .suite = { 1835 .aead = { 1836 .enc = { 1837 .vecs = 1838 hmac_sha1_aes_cbc_enc_tv_temp, 1839 .count = 1840 HMAC_SHA1_AES_CBC_ENC_TEST_VEC 1841 } 1842 } 1843 } 1844 }, { 1845 .alg = "authenc(hmac(sha1),cbc(des))", 1846 .test = alg_test_aead, 1847 .fips_allowed = 1, 1848 .suite = { 1849 .aead = { 1850 .enc = { 1851 .vecs = 1852 hmac_sha1_des_cbc_enc_tv_temp, 1853 .count = 1854 HMAC_SHA1_DES_CBC_ENC_TEST_VEC 1855 } 1856 } 1857 } 1858 }, { 1859 .alg = "authenc(hmac(sha1),cbc(des3_ede))", 1860 .test = alg_test_aead, 1861 .fips_allowed = 1, 1862 .suite = { 1863 .aead = { 1864 .enc = { 1865 .vecs = 1866 hmac_sha1_des3_ede_cbc_enc_tv_temp, 1867 .count = 1868 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC 1869 } 1870 } 1871 } 1872 }, { 1873 .alg = "authenc(hmac(sha1),ecb(cipher_null))", 1874 .test = alg_test_aead, 1875 .fips_allowed = 1, 1876 .suite = { 1877 .aead = { 1878 .enc = { 1879 .vecs = 1880 hmac_sha1_ecb_cipher_null_enc_tv_temp, 1881 .count = 1882 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC 1883 }, 1884 .dec = { 1885 .vecs = 1886 hmac_sha1_ecb_cipher_null_dec_tv_temp, 1887 .count = 1888 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC 1889 } 1890 } 1891 } 1892 }, { 1893 .alg = "authenc(hmac(sha224),cbc(des))", 1894 .test = alg_test_aead, 1895 .fips_allowed = 1, 1896 .suite = { 1897 .aead = { 1898 .enc = { 1899 .vecs = 1900 hmac_sha224_des_cbc_enc_tv_temp, 1901 .count = 1902 HMAC_SHA224_DES_CBC_ENC_TEST_VEC 1903 } 1904 } 1905 } 1906 }, { 1907 .alg = "authenc(hmac(sha224),cbc(des3_ede))", 1908 .test = alg_test_aead, 1909 .fips_allowed = 1, 1910 .suite = { 1911 .aead = { 1912 .enc = { 1913 .vecs = 1914 hmac_sha224_des3_ede_cbc_enc_tv_temp, 1915 .count = 1916 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC 1917 } 1918 } 1919 } 1920 }, { 1921 .alg = "authenc(hmac(sha256),cbc(aes))", 1922 .test = alg_test_aead, 1923 .fips_allowed = 1, 1924 .suite = { 1925 .aead = { 1926 .enc = { 1927 .vecs = 1928 hmac_sha256_aes_cbc_enc_tv_temp, 1929 .count = 1930 HMAC_SHA256_AES_CBC_ENC_TEST_VEC 1931 } 1932 } 1933 } 1934 }, { 1935 .alg = "authenc(hmac(sha256),cbc(des))", 1936 .test = alg_test_aead, 1937 .fips_allowed = 1, 1938 .suite = { 1939 .aead = { 1940 .enc = { 1941 .vecs = 1942 hmac_sha256_des_cbc_enc_tv_temp, 1943 .count = 1944 HMAC_SHA256_DES_CBC_ENC_TEST_VEC 1945 } 1946 } 1947 } 1948 }, { 1949 .alg = "authenc(hmac(sha256),cbc(des3_ede))", 1950 .test = alg_test_aead, 1951 .fips_allowed = 1, 1952 .suite = { 1953 .aead = { 1954 .enc = { 1955 .vecs = 1956 hmac_sha256_des3_ede_cbc_enc_tv_temp, 1957 .count = 1958 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC 1959 } 1960 } 1961 } 1962 }, { 1963 .alg = "authenc(hmac(sha384),cbc(des))", 1964 .test = alg_test_aead, 1965 .fips_allowed = 1, 1966 .suite = { 1967 .aead = { 1968 .enc = { 1969 .vecs = 1970 hmac_sha384_des_cbc_enc_tv_temp, 1971 .count = 1972 HMAC_SHA384_DES_CBC_ENC_TEST_VEC 1973 } 1974 } 1975 } 1976 }, { 1977 .alg = "authenc(hmac(sha384),cbc(des3_ede))", 1978 .test = alg_test_aead, 1979 .fips_allowed = 1, 1980 .suite = { 1981 .aead = { 1982 .enc = { 1983 .vecs = 1984 hmac_sha384_des3_ede_cbc_enc_tv_temp, 1985 .count = 1986 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC 1987 } 1988 } 1989 } 1990 }, { 1991 .alg = 
"authenc(hmac(sha512),cbc(aes))", 1992 .test = alg_test_aead, 1993 .fips_allowed = 1, 1994 .suite = { 1995 .aead = { 1996 .enc = { 1997 .vecs = 1998 hmac_sha512_aes_cbc_enc_tv_temp, 1999 .count = 2000 HMAC_SHA512_AES_CBC_ENC_TEST_VEC 2001 } 2002 } 2003 } 2004 }, { 2005 .alg = "authenc(hmac(sha512),cbc(des))", 2006 .test = alg_test_aead, 2007 .fips_allowed = 1, 2008 .suite = { 2009 .aead = { 2010 .enc = { 2011 .vecs = 2012 hmac_sha512_des_cbc_enc_tv_temp, 2013 .count = 2014 HMAC_SHA512_DES_CBC_ENC_TEST_VEC 2015 } 2016 } 2017 } 2018 }, { 2019 .alg = "authenc(hmac(sha512),cbc(des3_ede))", 2020 .test = alg_test_aead, 2021 .fips_allowed = 1, 2022 .suite = { 2023 .aead = { 2024 .enc = { 2025 .vecs = 2026 hmac_sha512_des3_ede_cbc_enc_tv_temp, 2027 .count = 2028 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC 2029 } 2030 } 2031 } 2032 }, { 2033 .alg = "cbc(aes)", 2034 .test = alg_test_skcipher, 2035 .fips_allowed = 1, 2036 .suite = { 2037 .cipher = { 2038 .enc = { 2039 .vecs = aes_cbc_enc_tv_template, 2040 .count = AES_CBC_ENC_TEST_VECTORS 2041 }, 2042 .dec = { 2043 .vecs = aes_cbc_dec_tv_template, 2044 .count = AES_CBC_DEC_TEST_VECTORS 2045 } 2046 } 2047 } 2048 }, { 2049 .alg = "cbc(anubis)", 2050 .test = alg_test_skcipher, 2051 .suite = { 2052 .cipher = { 2053 .enc = { 2054 .vecs = anubis_cbc_enc_tv_template, 2055 .count = ANUBIS_CBC_ENC_TEST_VECTORS 2056 }, 2057 .dec = { 2058 .vecs = anubis_cbc_dec_tv_template, 2059 .count = ANUBIS_CBC_DEC_TEST_VECTORS 2060 } 2061 } 2062 } 2063 }, { 2064 .alg = "cbc(blowfish)", 2065 .test = alg_test_skcipher, 2066 .suite = { 2067 .cipher = { 2068 .enc = { 2069 .vecs = bf_cbc_enc_tv_template, 2070 .count = BF_CBC_ENC_TEST_VECTORS 2071 }, 2072 .dec = { 2073 .vecs = bf_cbc_dec_tv_template, 2074 .count = BF_CBC_DEC_TEST_VECTORS 2075 } 2076 } 2077 } 2078 }, { 2079 .alg = "cbc(camellia)", 2080 .test = alg_test_skcipher, 2081 .suite = { 2082 .cipher = { 2083 .enc = { 2084 .vecs = camellia_cbc_enc_tv_template, 2085 .count = CAMELLIA_CBC_ENC_TEST_VECTORS 2086 }, 2087 .dec = { 2088 .vecs = camellia_cbc_dec_tv_template, 2089 .count = CAMELLIA_CBC_DEC_TEST_VECTORS 2090 } 2091 } 2092 } 2093 }, { 2094 .alg = "cbc(cast5)", 2095 .test = alg_test_skcipher, 2096 .suite = { 2097 .cipher = { 2098 .enc = { 2099 .vecs = cast5_cbc_enc_tv_template, 2100 .count = CAST5_CBC_ENC_TEST_VECTORS 2101 }, 2102 .dec = { 2103 .vecs = cast5_cbc_dec_tv_template, 2104 .count = CAST5_CBC_DEC_TEST_VECTORS 2105 } 2106 } 2107 } 2108 }, { 2109 .alg = "cbc(cast6)", 2110 .test = alg_test_skcipher, 2111 .suite = { 2112 .cipher = { 2113 .enc = { 2114 .vecs = cast6_cbc_enc_tv_template, 2115 .count = CAST6_CBC_ENC_TEST_VECTORS 2116 }, 2117 .dec = { 2118 .vecs = cast6_cbc_dec_tv_template, 2119 .count = CAST6_CBC_DEC_TEST_VECTORS 2120 } 2121 } 2122 } 2123 }, { 2124 .alg = "cbc(des)", 2125 .test = alg_test_skcipher, 2126 .suite = { 2127 .cipher = { 2128 .enc = { 2129 .vecs = des_cbc_enc_tv_template, 2130 .count = DES_CBC_ENC_TEST_VECTORS 2131 }, 2132 .dec = { 2133 .vecs = des_cbc_dec_tv_template, 2134 .count = DES_CBC_DEC_TEST_VECTORS 2135 } 2136 } 2137 } 2138 }, { 2139 .alg = "cbc(des3_ede)", 2140 .test = alg_test_skcipher, 2141 .fips_allowed = 1, 2142 .suite = { 2143 .cipher = { 2144 .enc = { 2145 .vecs = des3_ede_cbc_enc_tv_template, 2146 .count = DES3_EDE_CBC_ENC_TEST_VECTORS 2147 }, 2148 .dec = { 2149 .vecs = des3_ede_cbc_dec_tv_template, 2150 .count = DES3_EDE_CBC_DEC_TEST_VECTORS 2151 } 2152 } 2153 } 2154 }, { 2155 .alg = "cbc(serpent)", 2156 .test = alg_test_skcipher, 2157 .suite = { 2158 .cipher = { 2159 .enc = { 
2160 .vecs = serpent_cbc_enc_tv_template, 2161 .count = SERPENT_CBC_ENC_TEST_VECTORS 2162 }, 2163 .dec = { 2164 .vecs = serpent_cbc_dec_tv_template, 2165 .count = SERPENT_CBC_DEC_TEST_VECTORS 2166 } 2167 } 2168 } 2169 }, { 2170 .alg = "cbc(twofish)", 2171 .test = alg_test_skcipher, 2172 .suite = { 2173 .cipher = { 2174 .enc = { 2175 .vecs = tf_cbc_enc_tv_template, 2176 .count = TF_CBC_ENC_TEST_VECTORS 2177 }, 2178 .dec = { 2179 .vecs = tf_cbc_dec_tv_template, 2180 .count = TF_CBC_DEC_TEST_VECTORS 2181 } 2182 } 2183 } 2184 }, { 2185 .alg = "ccm(aes)", 2186 .test = alg_test_aead, 2187 .fips_allowed = 1, 2188 .suite = { 2189 .aead = { 2190 .enc = { 2191 .vecs = aes_ccm_enc_tv_template, 2192 .count = AES_CCM_ENC_TEST_VECTORS 2193 }, 2194 .dec = { 2195 .vecs = aes_ccm_dec_tv_template, 2196 .count = AES_CCM_DEC_TEST_VECTORS 2197 } 2198 } 2199 } 2200 }, { 2201 .alg = "cmac(aes)", 2202 .test = alg_test_hash, 2203 .suite = { 2204 .hash = { 2205 .vecs = aes_cmac128_tv_template, 2206 .count = CMAC_AES_TEST_VECTORS 2207 } 2208 } 2209 }, { 2210 .alg = "cmac(des3_ede)", 2211 .test = alg_test_hash, 2212 .suite = { 2213 .hash = { 2214 .vecs = des3_ede_cmac64_tv_template, 2215 .count = CMAC_DES3_EDE_TEST_VECTORS 2216 } 2217 } 2218 }, { 2219 .alg = "compress_null", 2220 .test = alg_test_null, 2221 }, { 2222 .alg = "crc32c", 2223 .test = alg_test_crc32c, 2224 .fips_allowed = 1, 2225 .suite = { 2226 .hash = { 2227 .vecs = crc32c_tv_template, 2228 .count = CRC32C_TEST_VECTORS 2229 } 2230 } 2231 }, { 2232 .alg = "crct10dif", 2233 .test = alg_test_hash, 2234 .fips_allowed = 1, 2235 .suite = { 2236 .hash = { 2237 .vecs = crct10dif_tv_template, 2238 .count = CRCT10DIF_TEST_VECTORS 2239 } 2240 } 2241 }, { 2242 .alg = "cryptd(__driver-cbc-aes-aesni)", 2243 .test = alg_test_null, 2244 .fips_allowed = 1, 2245 }, { 2246 .alg = "cryptd(__driver-cbc-camellia-aesni)", 2247 .test = alg_test_null, 2248 }, { 2249 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)", 2250 .test = alg_test_null, 2251 }, { 2252 .alg = "cryptd(__driver-cbc-serpent-avx2)", 2253 .test = alg_test_null, 2254 }, { 2255 .alg = "cryptd(__driver-ecb-aes-aesni)", 2256 .test = alg_test_null, 2257 .fips_allowed = 1, 2258 }, { 2259 .alg = "cryptd(__driver-ecb-camellia-aesni)", 2260 .test = alg_test_null, 2261 }, { 2262 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)", 2263 .test = alg_test_null, 2264 }, { 2265 .alg = "cryptd(__driver-ecb-cast5-avx)", 2266 .test = alg_test_null, 2267 }, { 2268 .alg = "cryptd(__driver-ecb-cast6-avx)", 2269 .test = alg_test_null, 2270 }, { 2271 .alg = "cryptd(__driver-ecb-serpent-avx)", 2272 .test = alg_test_null, 2273 }, { 2274 .alg = "cryptd(__driver-ecb-serpent-avx2)", 2275 .test = alg_test_null, 2276 }, { 2277 .alg = "cryptd(__driver-ecb-serpent-sse2)", 2278 .test = alg_test_null, 2279 }, { 2280 .alg = "cryptd(__driver-ecb-twofish-avx)", 2281 .test = alg_test_null, 2282 }, { 2283 .alg = "cryptd(__driver-gcm-aes-aesni)", 2284 .test = alg_test_null, 2285 .fips_allowed = 1, 2286 }, { 2287 .alg = "cryptd(__ghash-pclmulqdqni)", 2288 .test = alg_test_null, 2289 .fips_allowed = 1, 2290 }, { 2291 .alg = "ctr(aes)", 2292 .test = alg_test_skcipher, 2293 .fips_allowed = 1, 2294 .suite = { 2295 .cipher = { 2296 .enc = { 2297 .vecs = aes_ctr_enc_tv_template, 2298 .count = AES_CTR_ENC_TEST_VECTORS 2299 }, 2300 .dec = { 2301 .vecs = aes_ctr_dec_tv_template, 2302 .count = AES_CTR_DEC_TEST_VECTORS 2303 } 2304 } 2305 } 2306 }, { 2307 .alg = "ctr(blowfish)", 2308 .test = alg_test_skcipher, 2309 .suite = { 2310 .cipher = { 2311 .enc = { 2312 
					.vecs = bf_ctr_enc_tv_template,
					.count = BF_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_ctr_dec_tv_template,
					.count = BF_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_ctr_enc_tv_template,
					.count = CAMELLIA_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_ctr_dec_tv_template,
					.count = CAMELLIA_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_ctr_enc_tv_template,
					.count = CAST5_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_ctr_dec_tv_template,
					.count = CAST5_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_ctr_enc_tv_template,
					.count = CAST6_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_ctr_dec_tv_template,
					.count = CAST6_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_ctr_enc_tv_template,
					.count = DES_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_ctr_dec_tv_template,
					.count = DES_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_ctr_enc_tv_template,
					.count = DES3_EDE_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_ctr_dec_tv_template,
					.count = DES3_EDE_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_ctr_enc_tv_template,
					.count = SERPENT_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_ctr_dec_tv_template,
					.count = SERPENT_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_ctr_enc_tv_template,
					.count = TF_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_ctr_dec_tv_template,
					.count = TF_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cts_mode_enc_tv_template,
					.count = CTS_MODE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cts_mode_dec_tv_template,
					.count = CTS_MODE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "deflate",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = deflate_comp_tv_template,
					.count = DEFLATE_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = deflate_decomp_tv_template,
					.count = DEFLATE_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "digest_null",
		.test = alg_test_null,
	}, {
		.alg = "ecb(__aes-aesni)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_enc_tv_template,
					.count = AES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_dec_tv_template,
					.count =
						AES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_enc_tv_template,
					.count = ANUBIS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_dec_tv_template,
					.count = ANUBIS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(arc4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = arc4_enc_tv_template,
					.count = ARC4_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = arc4_dec_tv_template,
					.count = ARC4_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_enc_tv_template,
					.count = BF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_dec_tv_template,
					.count = BF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_enc_tv_template,
					.count = CAMELLIA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_dec_tv_template,
					.count = CAMELLIA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_enc_tv_template,
					.count = CAST5_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_dec_tv_template,
					.count = CAST5_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_enc_tv_template,
					.count = CAST6_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_dec_tv_template,
					.count = CAST6_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cipher_null)",
		.test = alg_test_null,
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_enc_tv_template,
					.count = DES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_dec_tv_template,
					.count = DES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_enc_tv_template,
					.count = DES3_EDE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_dec_tv_template,
					.count = DES3_EDE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = 1
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = 1
				}
			}
		}
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = khazad_enc_tv_template,
					.count = KHAZAD_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = khazad_dec_tv_template,
					.count = KHAZAD_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = seed_enc_tv_template,
					.count = SEED_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = seed_dec_tv_template,
					.count = SEED_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(serpent)",
		.test =
			alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_enc_tv_template,
					.count = SERPENT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_dec_tv_template,
					.count = SERPENT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tea_enc_tv_template,
					.count = TEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tea_dec_tv_template,
					.count = TEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tnepres_enc_tv_template,
					.count = TNEPRES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tnepres_dec_tv_template,
					.count = TNEPRES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_enc_tv_template,
					.count = TF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_dec_tv_template,
					.count = TF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xeta_enc_tv_template,
					.count = XETA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xeta_dec_tv_template,
					.count = XETA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xtea_enc_tv_template,
					.count = XTEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xtea_dec_tv_template,
					.count = XTEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_enc_tv_template,
					.count = AES_GCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_dec_tv_template,
					.count = AES_GCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ghash",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = ghash_tv_template,
				.count = GHASH_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(crc32)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = bfin_crc_tv_template,
				.count = BFIN_CRC_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash
				= {
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_lrw_enc_tv_template,
					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_lrw_dec_tv_template,
					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_lrw_enc_tv_template,
					.count = CAST6_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_lrw_dec_tv_template,
					.count = CAST6_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_lrw_enc_tv_template,
					.count = SERPENT_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_lrw_dec_tv_template,
					.count = SERPENT_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_lrw_enc_tv_template,
					.count = TF_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_lrw_dec_tv_template,
					.count = TF_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "ofb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ofb_enc_tv_template,
					.count = AES_OFB_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ofb_dec_tv_template,
					.count = AES_OFB_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_rfc3686_enc_tv_template,
					.count = AES_CTR_3686_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_rfc3686_dec_tv_template,
					.count = AES_CTR_3686_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4106(gcm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4106_enc_tv_template,
					.count = AES_GCM_4106_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4106_dec_tv_template,
					.count = AES_GCM_4106_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_rfc4309_enc_tv_template,
					.count = AES_CCM_4309_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_rfc4309_dec_tv_template,
					.count = AES_CCM_4309_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4543(gcm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4543_enc_tv_template,
					.count = AES_GCM_4543_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4543_dec_tv_template,
					.count = AES_GCM_4543_DEC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg =
"tgr160", 3171 .test = alg_test_hash, 3172 .suite = { 3173 .hash = { 3174 .vecs = tgr160_tv_template, 3175 .count = TGR160_TEST_VECTORS 3176 } 3177 } 3178 }, { 3179 .alg = "tgr192", 3180 .test = alg_test_hash, 3181 .suite = { 3182 .hash = { 3183 .vecs = tgr192_tv_template, 3184 .count = TGR192_TEST_VECTORS 3185 } 3186 } 3187 }, { 3188 .alg = "vmac(aes)", 3189 .test = alg_test_hash, 3190 .suite = { 3191 .hash = { 3192 .vecs = aes_vmac128_tv_template, 3193 .count = VMAC_AES_TEST_VECTORS 3194 } 3195 } 3196 }, { 3197 .alg = "wp256", 3198 .test = alg_test_hash, 3199 .suite = { 3200 .hash = { 3201 .vecs = wp256_tv_template, 3202 .count = WP256_TEST_VECTORS 3203 } 3204 } 3205 }, { 3206 .alg = "wp384", 3207 .test = alg_test_hash, 3208 .suite = { 3209 .hash = { 3210 .vecs = wp384_tv_template, 3211 .count = WP384_TEST_VECTORS 3212 } 3213 } 3214 }, { 3215 .alg = "wp512", 3216 .test = alg_test_hash, 3217 .suite = { 3218 .hash = { 3219 .vecs = wp512_tv_template, 3220 .count = WP512_TEST_VECTORS 3221 } 3222 } 3223 }, { 3224 .alg = "xcbc(aes)", 3225 .test = alg_test_hash, 3226 .suite = { 3227 .hash = { 3228 .vecs = aes_xcbc128_tv_template, 3229 .count = XCBC_AES_TEST_VECTORS 3230 } 3231 } 3232 }, { 3233 .alg = "xts(aes)", 3234 .test = alg_test_skcipher, 3235 .fips_allowed = 1, 3236 .suite = { 3237 .cipher = { 3238 .enc = { 3239 .vecs = aes_xts_enc_tv_template, 3240 .count = AES_XTS_ENC_TEST_VECTORS 3241 }, 3242 .dec = { 3243 .vecs = aes_xts_dec_tv_template, 3244 .count = AES_XTS_DEC_TEST_VECTORS 3245 } 3246 } 3247 } 3248 }, { 3249 .alg = "xts(camellia)", 3250 .test = alg_test_skcipher, 3251 .suite = { 3252 .cipher = { 3253 .enc = { 3254 .vecs = camellia_xts_enc_tv_template, 3255 .count = CAMELLIA_XTS_ENC_TEST_VECTORS 3256 }, 3257 .dec = { 3258 .vecs = camellia_xts_dec_tv_template, 3259 .count = CAMELLIA_XTS_DEC_TEST_VECTORS 3260 } 3261 } 3262 } 3263 }, { 3264 .alg = "xts(cast6)", 3265 .test = alg_test_skcipher, 3266 .suite = { 3267 .cipher = { 3268 .enc = { 3269 .vecs = cast6_xts_enc_tv_template, 3270 .count = CAST6_XTS_ENC_TEST_VECTORS 3271 }, 3272 .dec = { 3273 .vecs = cast6_xts_dec_tv_template, 3274 .count = CAST6_XTS_DEC_TEST_VECTORS 3275 } 3276 } 3277 } 3278 }, { 3279 .alg = "xts(serpent)", 3280 .test = alg_test_skcipher, 3281 .suite = { 3282 .cipher = { 3283 .enc = { 3284 .vecs = serpent_xts_enc_tv_template, 3285 .count = SERPENT_XTS_ENC_TEST_VECTORS 3286 }, 3287 .dec = { 3288 .vecs = serpent_xts_dec_tv_template, 3289 .count = SERPENT_XTS_DEC_TEST_VECTORS 3290 } 3291 } 3292 } 3293 }, { 3294 .alg = "xts(twofish)", 3295 .test = alg_test_skcipher, 3296 .suite = { 3297 .cipher = { 3298 .enc = { 3299 .vecs = tf_xts_enc_tv_template, 3300 .count = TF_XTS_ENC_TEST_VECTORS 3301 }, 3302 .dec = { 3303 .vecs = tf_xts_dec_tv_template, 3304 .count = TF_XTS_DEC_TEST_VECTORS 3305 } 3306 } 3307 } 3308 }, { 3309 .alg = "zlib", 3310 .test = alg_test_pcomp, 3311 .fips_allowed = 1, 3312 .suite = { 3313 .pcomp = { 3314 .comp = { 3315 .vecs = zlib_comp_tv_template, 3316 .count = ZLIB_COMP_TEST_VECTORS 3317 }, 3318 .decomp = { 3319 .vecs = zlib_decomp_tv_template, 3320 .count = ZLIB_DECOMP_TEST_VECTORS 3321 } 3322 } 3323 } 3324 } 3325 }; 3326 3327 static bool alg_test_descs_checked; 3328 3329 static void alg_test_descs_check_order(void) 3330 { 3331 int i; 3332 3333 /* only check once */ 3334 if (alg_test_descs_checked) 3335 return; 3336 3337 alg_test_descs_checked = true; 3338 3339 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) { 3340 int diff = strcmp(alg_test_descs[i - 1].alg, 3341 alg_test_descs[i].alg); 3342 3343 if 
		    (WARN_ON(diff > 0)) {
			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
				alg_test_descs[i - 1].alg,
				alg_test_descs[i].alg);
		}

		if (WARN_ON(diff == 0)) {
			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
				alg_test_descs[i].alg);
		}
	}
}

/* Binary search; relies on alg_test_descs[] being sorted by .alg name. */
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/* Single block ciphers are tested through their ecb() wrapper. */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* Run the tests registered for the algorithm name and/or the driver name. */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n",
			driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);
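
/*
 * Usage sketch (illustrative only, not taken from this file): the crypto
 * manager invokes alg_test() with an implementation's driver name and the
 * canonical algorithm name when that implementation is registered, e.g.
 *
 *	err = alg_test("cbc-aes-aesni", "cbc(aes)",
 *		       CRYPTO_ALG_TYPE_BLKCIPHER, 0);
 *
 * The driver name "cbc-aes-aesni" here is just an example.  A return value
 * of 0 means the self-tests passed or that no test vectors are registered
 * for the algorithm; in FIPS mode a failing self-test panics instead of
 * returning an error.
 */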