1 /* 2 * Algorithm testing framework and tests. 3 * 4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> 6 * Copyright (c) 2007 Nokia Siemens Networks 7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> 8 * 9 * Updated RFC4106 AES-GCM testing. 10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com) 11 * Adrian Hoban <adrian.hoban@intel.com> 12 * Gabriele Paoloni <gabriele.paoloni@intel.com> 13 * Tadeusz Struk (tadeusz.struk@intel.com) 14 * Copyright (c) 2010, Intel Corporation. 15 * 16 * This program is free software; you can redistribute it and/or modify it 17 * under the terms of the GNU General Public License as published by the Free 18 * Software Foundation; either version 2 of the License, or (at your option) 19 * any later version. 20 * 21 */ 22 23 #include <crypto/hash.h> 24 #include <linux/err.h> 25 #include <linux/module.h> 26 #include <linux/scatterlist.h> 27 #include <linux/slab.h> 28 #include <linux/string.h> 29 #include <crypto/rng.h> 30 31 #include "internal.h" 32 33 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS 34 35 /* a perfect nop */ 36 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 37 { 38 return 0; 39 } 40 41 #else 42 43 #include "testmgr.h" 44 45 /* 46 * Need slab memory for testing (size in number of pages). 47 */ 48 #define XBUFSIZE 8 49 50 /* 51 * Indexes into the xbuf to simulate cross-page access. 52 */ 53 #define IDX1 32 54 #define IDX2 32400 55 #define IDX3 1 56 #define IDX4 8193 57 #define IDX5 22222 58 #define IDX6 17101 59 #define IDX7 27333 60 #define IDX8 3000 61 62 /* 63 * Used by test_cipher() 64 */ 65 #define ENCRYPT 1 66 #define DECRYPT 0 67 68 struct tcrypt_result { 69 struct completion completion; 70 int err; 71 }; 72 73 struct aead_test_suite { 74 struct { 75 struct aead_testvec *vecs; 76 unsigned int count; 77 } enc, dec; 78 }; 79 80 struct cipher_test_suite { 81 struct { 82 struct cipher_testvec *vecs; 83 unsigned int count; 84 } enc, dec; 85 }; 86 87 struct comp_test_suite { 88 struct { 89 struct comp_testvec *vecs; 90 unsigned int count; 91 } comp, decomp; 92 }; 93 94 struct pcomp_test_suite { 95 struct { 96 struct pcomp_testvec *vecs; 97 unsigned int count; 98 } comp, decomp; 99 }; 100 101 struct hash_test_suite { 102 struct hash_testvec *vecs; 103 unsigned int count; 104 }; 105 106 struct cprng_test_suite { 107 struct cprng_testvec *vecs; 108 unsigned int count; 109 }; 110 111 struct alg_test_desc { 112 const char *alg; 113 int (*test)(const struct alg_test_desc *desc, const char *driver, 114 u32 type, u32 mask); 115 int fips_allowed; /* set if alg is allowed in fips mode */ 116 117 union { 118 struct aead_test_suite aead; 119 struct cipher_test_suite cipher; 120 struct comp_test_suite comp; 121 struct pcomp_test_suite pcomp; 122 struct hash_test_suite hash; 123 struct cprng_test_suite cprng; 124 } suite; 125 }; 126 127 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 }; 128 129 static void hexdump(unsigned char *buf, unsigned int len) 130 { 131 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET, 132 16, 1, 133 buf, len, false); 134 } 135 136 static void tcrypt_complete(struct crypto_async_request *req, int err) 137 { 138 struct tcrypt_result *res = req->data; 139 140 if (err == -EINPROGRESS) 141 return; 142 143 res->err = err; 144 complete(&res->completion); 145 } 146 147 static int testmgr_alloc_buf(char *buf[XBUFSIZE]) 148 { 149 int i; 150 151 for (i = 0; i < XBUFSIZE; i++) { 152 buf[i] = (void 
*)__get_free_page(GFP_KERNEL); 153 if (!buf[i]) 154 goto err_free_buf; 155 } 156 157 return 0; 158 159 err_free_buf: 160 while (i-- > 0) 161 free_page((unsigned long)buf[i]); 162 163 return -ENOMEM; 164 } 165 166 static void testmgr_free_buf(char *buf[XBUFSIZE]) 167 { 168 int i; 169 170 for (i = 0; i < XBUFSIZE; i++) 171 free_page((unsigned long)buf[i]); 172 } 173 174 static int do_one_async_hash_op(struct ahash_request *req, 175 struct tcrypt_result *tr, 176 int ret) 177 { 178 if (ret == -EINPROGRESS || ret == -EBUSY) { 179 ret = wait_for_completion_interruptible(&tr->completion); 180 if (!ret) 181 ret = tr->err; 182 INIT_COMPLETION(tr->completion); 183 } 184 return ret; 185 } 186 187 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 188 unsigned int tcount, bool use_digest, 189 const int align_offset) 190 { 191 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 192 unsigned int i, j, k, temp; 193 struct scatterlist sg[8]; 194 char result[64]; 195 struct ahash_request *req; 196 struct tcrypt_result tresult; 197 void *hash_buff; 198 char *xbuf[XBUFSIZE]; 199 int ret = -ENOMEM; 200 201 if (testmgr_alloc_buf(xbuf)) 202 goto out_nobuf; 203 204 init_completion(&tresult.completion); 205 206 req = ahash_request_alloc(tfm, GFP_KERNEL); 207 if (!req) { 208 printk(KERN_ERR "alg: hash: Failed to allocate request for " 209 "%s\n", algo); 210 goto out_noreq; 211 } 212 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 213 tcrypt_complete, &tresult); 214 215 j = 0; 216 for (i = 0; i < tcount; i++) { 217 if (template[i].np) 218 continue; 219 220 ret = -EINVAL; 221 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) 222 goto out; 223 224 j++; 225 memset(result, 0, 64); 226 227 hash_buff = xbuf[0]; 228 hash_buff += align_offset; 229 230 memcpy(hash_buff, template[i].plaintext, template[i].psize); 231 sg_init_one(&sg[0], hash_buff, template[i].psize); 232 233 if (template[i].ksize) { 234 crypto_ahash_clear_flags(tfm, ~0); 235 ret = crypto_ahash_setkey(tfm, template[i].key, 236 template[i].ksize); 237 if (ret) { 238 printk(KERN_ERR "alg: hash: setkey failed on " 239 "test %d for %s: ret=%d\n", j, algo, 240 -ret); 241 goto out; 242 } 243 } 244 245 ahash_request_set_crypt(req, sg, result, template[i].psize); 246 if (use_digest) { 247 ret = do_one_async_hash_op(req, &tresult, 248 crypto_ahash_digest(req)); 249 if (ret) { 250 pr_err("alg: hash: digest failed on test %d " 251 "for %s: ret=%d\n", j, algo, -ret); 252 goto out; 253 } 254 } else { 255 ret = do_one_async_hash_op(req, &tresult, 256 crypto_ahash_init(req)); 257 if (ret) { 258 pr_err("alg: hash: init failed on test %d " 259 "for %s: ret=%d\n", j, algo, -ret); 260 goto out; 261 } 262 ret = do_one_async_hash_op(req, &tresult, 263 crypto_ahash_update(req)); 264 if (ret) { 265 pr_err("alg: hash: update failed on test %d " 266 "for %s: ret=%d\n", j, algo, -ret); 267 goto out; 268 } 269 ret = do_one_async_hash_op(req, &tresult, 270 crypto_ahash_final(req)); 271 if (ret) { 272 pr_err("alg: hash: final failed on test %d " 273 "for %s: ret=%d\n", j, algo, -ret); 274 goto out; 275 } 276 } 277 278 if (memcmp(result, template[i].digest, 279 crypto_ahash_digestsize(tfm))) { 280 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 281 j, algo); 282 hexdump(result, crypto_ahash_digestsize(tfm)); 283 ret = -EINVAL; 284 goto out; 285 } 286 } 287 288 j = 0; 289 for (i = 0; i < tcount; i++) { 290 /* alignment tests are only done with continuous buffers */ 291 if (align_offset != 0) 292 break; 293 294 if
(template[i].np) { 295 j++; 296 memset(result, 0, 64); 297 298 temp = 0; 299 sg_init_table(sg, template[i].np); 300 ret = -EINVAL; 301 for (k = 0; k < template[i].np; k++) { 302 if (WARN_ON(offset_in_page(IDX[k]) + 303 template[i].tap[k] > PAGE_SIZE)) 304 goto out; 305 sg_set_buf(&sg[k], 306 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + 307 offset_in_page(IDX[k]), 308 template[i].plaintext + temp, 309 template[i].tap[k]), 310 template[i].tap[k]); 311 temp += template[i].tap[k]; 312 } 313 314 if (template[i].ksize) { 315 crypto_ahash_clear_flags(tfm, ~0); 316 ret = crypto_ahash_setkey(tfm, template[i].key, 317 template[i].ksize); 318 319 if (ret) { 320 printk(KERN_ERR "alg: hash: setkey " 321 "failed on chunking test %d " 322 "for %s: ret=%d\n", j, algo, 323 -ret); 324 goto out; 325 } 326 } 327 328 ahash_request_set_crypt(req, sg, result, 329 template[i].psize); 330 ret = crypto_ahash_digest(req); 331 switch (ret) { 332 case 0: 333 break; 334 case -EINPROGRESS: 335 case -EBUSY: 336 ret = wait_for_completion_interruptible( 337 &tresult.completion); 338 if (!ret && !(ret = tresult.err)) { 339 INIT_COMPLETION(tresult.completion); 340 break; 341 } 342 /* fall through */ 343 default: 344 printk(KERN_ERR "alg: hash: digest failed " 345 "on chunking test %d for %s: " 346 "ret=%d\n", j, algo, -ret); 347 goto out; 348 } 349 350 if (memcmp(result, template[i].digest, 351 crypto_ahash_digestsize(tfm))) { 352 printk(KERN_ERR "alg: hash: Chunking test %d " 353 "failed for %s\n", j, algo); 354 hexdump(result, crypto_ahash_digestsize(tfm)); 355 ret = -EINVAL; 356 goto out; 357 } 358 } 359 } 360 361 ret = 0; 362 363 out: 364 ahash_request_free(req); 365 out_noreq: 366 testmgr_free_buf(xbuf); 367 out_nobuf: 368 return ret; 369 } 370 371 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 372 unsigned int tcount, bool use_digest) 373 { 374 unsigned int alignmask; 375 int ret; 376 377 ret = __test_hash(tfm, template, tcount, use_digest, 0); 378 if (ret) 379 return ret; 380 381 /* test unaligned buffers, check with one byte offset */ 382 ret = __test_hash(tfm, template, tcount, use_digest, 1); 383 if (ret) 384 return ret; 385 386 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 387 if (alignmask) { 388 /* Check if alignment mask for tfm is correctly set. */ 389 ret = __test_hash(tfm, template, tcount, use_digest, 390 alignmask + 1); 391 if (ret) 392 return ret; 393 } 394 395 return 0; 396 } 397 398 static int __test_aead(struct crypto_aead *tfm, int enc, 399 struct aead_testvec *template, unsigned int tcount, 400 const bool diff_dst, const int align_offset) 401 { 402 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 403 unsigned int i, j, k, n, temp; 404 int ret = -ENOMEM; 405 char *q; 406 char *key; 407 struct aead_request *req; 408 struct scatterlist *sg; 409 struct scatterlist *asg; 410 struct scatterlist *sgout; 411 const char *e, *d; 412 struct tcrypt_result result; 413 unsigned int authsize; 414 void *input; 415 void *output; 416 void *assoc; 417 char iv[MAX_IVLEN]; 418 char *xbuf[XBUFSIZE]; 419 char *xoutbuf[XBUFSIZE]; 420 char *axbuf[XBUFSIZE]; 421 422 if (testmgr_alloc_buf(xbuf)) 423 goto out_noxbuf; 424 if (testmgr_alloc_buf(axbuf)) 425 goto out_noaxbuf; 426 427 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 428 goto out_nooutbuf; 429 430 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ 431 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 
3 : 2), GFP_KERNEL); 432 if (!sg) 433 goto out_nosg; 434 asg = &sg[8]; 435 sgout = &asg[8]; 436 437 if (diff_dst) 438 d = "-ddst"; 439 else 440 d = ""; 441 442 if (enc == ENCRYPT) 443 e = "encryption"; 444 else 445 e = "decryption"; 446 447 init_completion(&result.completion); 448 449 req = aead_request_alloc(tfm, GFP_KERNEL); 450 if (!req) { 451 pr_err("alg: aead%s: Failed to allocate request for %s\n", 452 d, algo); 453 goto out; 454 } 455 456 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 457 tcrypt_complete, &result); 458 459 for (i = 0, j = 0; i < tcount; i++) { 460 if (!template[i].np) { 461 j++; 462 463 /* some templates have no input data but they will 464 * touch input 465 */ 466 input = xbuf[0]; 467 input += align_offset; 468 assoc = axbuf[0]; 469 470 ret = -EINVAL; 471 if (WARN_ON(align_offset + template[i].ilen > 472 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 473 goto out; 474 475 memcpy(input, template[i].input, template[i].ilen); 476 memcpy(assoc, template[i].assoc, template[i].alen); 477 if (template[i].iv) 478 memcpy(iv, template[i].iv, MAX_IVLEN); 479 else 480 memset(iv, 0, MAX_IVLEN); 481 482 crypto_aead_clear_flags(tfm, ~0); 483 if (template[i].wk) 484 crypto_aead_set_flags( 485 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 486 487 key = template[i].key; 488 489 ret = crypto_aead_setkey(tfm, key, 490 template[i].klen); 491 if (!ret == template[i].fail) { 492 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", 493 d, j, algo, crypto_aead_get_flags(tfm)); 494 goto out; 495 } else if (ret) 496 continue; 497 498 authsize = abs(template[i].rlen - template[i].ilen); 499 ret = crypto_aead_setauthsize(tfm, authsize); 500 if (ret) { 501 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", 502 d, authsize, j, algo); 503 goto out; 504 } 505 506 sg_init_one(&sg[0], input, 507 template[i].ilen + (enc ? authsize : 0)); 508 509 if (diff_dst) { 510 output = xoutbuf[0]; 511 output += align_offset; 512 sg_init_one(&sgout[0], output, 513 template[i].ilen + 514 (enc ? authsize : 0)); 515 } else { 516 output = input; 517 } 518 519 sg_init_one(&asg[0], assoc, template[i].alen); 520 521 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 522 template[i].ilen, iv); 523 524 aead_request_set_assoc(req, asg, template[i].alen); 525 526 ret = enc ? 
527 crypto_aead_encrypt(req) : 528 crypto_aead_decrypt(req); 529 530 switch (ret) { 531 case 0: 532 if (template[i].novrfy) { 533 /* verification was supposed to fail */ 534 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 535 d, e, j, algo); 536 /* so really, we got a bad message */ 537 ret = -EBADMSG; 538 goto out; 539 } 540 break; 541 case -EINPROGRESS: 542 case -EBUSY: 543 ret = wait_for_completion_interruptible( 544 &result.completion); 545 if (!ret && !(ret = result.err)) { 546 INIT_COMPLETION(result.completion); 547 break; 548 } 549 case -EBADMSG: 550 if (template[i].novrfy) 551 /* verification failure was expected */ 552 continue; 553 /* fall through */ 554 default: 555 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", 556 d, e, j, algo, -ret); 557 goto out; 558 } 559 560 q = output; 561 if (memcmp(q, template[i].result, template[i].rlen)) { 562 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 563 d, j, e, algo); 564 hexdump(q, template[i].rlen); 565 ret = -EINVAL; 566 goto out; 567 } 568 } 569 } 570 571 for (i = 0, j = 0; i < tcount; i++) { 572 /* alignment tests are only done with continuous buffers */ 573 if (align_offset != 0) 574 break; 575 576 if (template[i].np) { 577 j++; 578 579 if (template[i].iv) 580 memcpy(iv, template[i].iv, MAX_IVLEN); 581 else 582 memset(iv, 0, MAX_IVLEN); 583 584 crypto_aead_clear_flags(tfm, ~0); 585 if (template[i].wk) 586 crypto_aead_set_flags( 587 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 588 key = template[i].key; 589 590 ret = crypto_aead_setkey(tfm, key, template[i].klen); 591 if (!ret == template[i].fail) { 592 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 593 d, j, algo, crypto_aead_get_flags(tfm)); 594 goto out; 595 } else if (ret) 596 continue; 597 598 authsize = abs(template[i].rlen - template[i].ilen); 599 600 ret = -EINVAL; 601 sg_init_table(sg, template[i].np); 602 if (diff_dst) 603 sg_init_table(sgout, template[i].np); 604 for (k = 0, temp = 0; k < template[i].np; k++) { 605 if (WARN_ON(offset_in_page(IDX[k]) + 606 template[i].tap[k] > PAGE_SIZE)) 607 goto out; 608 609 q = xbuf[IDX[k] >> PAGE_SHIFT] + 610 offset_in_page(IDX[k]); 611 612 memcpy(q, template[i].input + temp, 613 template[i].tap[k]); 614 615 n = template[i].tap[k]; 616 if (k == template[i].np - 1 && enc) 617 n += authsize; 618 if (offset_in_page(q) + n < PAGE_SIZE) 619 q[n] = 0; 620 621 sg_set_buf(&sg[k], q, template[i].tap[k]); 622 623 if (diff_dst) { 624 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 625 offset_in_page(IDX[k]); 626 627 memset(q, 0, template[i].tap[k]); 628 if (offset_in_page(q) + n < PAGE_SIZE) 629 q[n] = 0; 630 631 sg_set_buf(&sgout[k], q, 632 template[i].tap[k]); 633 } 634 635 temp += template[i].tap[k]; 636 } 637 638 ret = crypto_aead_setauthsize(tfm, authsize); 639 if (ret) { 640 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 641 d, authsize, j, algo); 642 goto out; 643 } 644 645 if (enc) { 646 if (WARN_ON(sg[k - 1].offset + 647 sg[k - 1].length + authsize > 648 PAGE_SIZE)) { 649 ret = -EINVAL; 650 goto out; 651 } 652 653 sg[k - 1].length += authsize; 654 655 if (diff_dst) 656 sgout[k - 1].length += authsize; 657 } 658 659 sg_init_table(asg, template[i].anp); 660 ret = -EINVAL; 661 for (k = 0, temp = 0; k < template[i].anp; k++) { 662 if (WARN_ON(offset_in_page(IDX[k]) + 663 template[i].atap[k] > PAGE_SIZE)) 664 goto out; 665 sg_set_buf(&asg[k], 666 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 667 offset_in_page(IDX[k]), 668 template[i].assoc + temp, 669 
template[i].atap[k]), 670 template[i].atap[k]); 671 temp += template[i].atap[k]; 672 } 673 674 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 675 template[i].ilen, 676 iv); 677 678 aead_request_set_assoc(req, asg, template[i].alen); 679 680 ret = enc ? 681 crypto_aead_encrypt(req) : 682 crypto_aead_decrypt(req); 683 684 switch (ret) { 685 case 0: 686 if (template[i].novrfy) { 687 /* verification was supposed to fail */ 688 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 689 d, e, j, algo); 690 /* so really, we got a bad message */ 691 ret = -EBADMSG; 692 goto out; 693 } 694 break; 695 case -EINPROGRESS: 696 case -EBUSY: 697 ret = wait_for_completion_interruptible( 698 &result.completion); 699 if (!ret && !(ret = result.err)) { 700 INIT_COMPLETION(result.completion); 701 break; 702 } 703 case -EBADMSG: 704 if (template[i].novrfy) 705 /* verification failure was expected */ 706 continue; 707 /* fall through */ 708 default: 709 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", 710 d, e, j, algo, -ret); 711 goto out; 712 } 713 714 ret = -EINVAL; 715 for (k = 0, temp = 0; k < template[i].np; k++) { 716 if (diff_dst) 717 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 718 offset_in_page(IDX[k]); 719 else 720 q = xbuf[IDX[k] >> PAGE_SHIFT] + 721 offset_in_page(IDX[k]); 722 723 n = template[i].tap[k]; 724 if (k == template[i].np - 1) 725 n += enc ? authsize : -authsize; 726 727 if (memcmp(q, template[i].result + temp, n)) { 728 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 729 d, j, e, k, algo); 730 hexdump(q, n); 731 goto out; 732 } 733 734 q += n; 735 if (k == template[i].np - 1 && !enc) { 736 if (!diff_dst && 737 memcmp(q, template[i].input + 738 temp + n, authsize)) 739 n = authsize; 740 else 741 n = 0; 742 } else { 743 for (n = 0; offset_in_page(q + n) && 744 q[n]; n++) 745 ; 746 } 747 if (n) { 748 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 749 d, j, e, k, algo, n); 750 hexdump(q, n); 751 goto out; 752 } 753 754 temp += template[i].tap[k]; 755 } 756 } 757 } 758 759 ret = 0; 760 761 out: 762 aead_request_free(req); 763 kfree(sg); 764 out_nosg: 765 if (diff_dst) 766 testmgr_free_buf(xoutbuf); 767 out_nooutbuf: 768 testmgr_free_buf(axbuf); 769 out_noaxbuf: 770 testmgr_free_buf(xbuf); 771 out_noxbuf: 772 return ret; 773 } 774 775 static int test_aead(struct crypto_aead *tfm, int enc, 776 struct aead_testvec *template, unsigned int tcount) 777 { 778 unsigned int alignmask; 779 int ret; 780 781 /* test 'dst == src' case */ 782 ret = __test_aead(tfm, enc, template, tcount, false, 0); 783 if (ret) 784 return ret; 785 786 /* test 'dst != src' case */ 787 ret = __test_aead(tfm, enc, template, tcount, true, 0); 788 if (ret) 789 return ret; 790 791 /* test unaligned buffers, check with one byte offset */ 792 ret = __test_aead(tfm, enc, template, tcount, true, 1); 793 if (ret) 794 return ret; 795 796 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 797 if (alignmask) { 798 /* Check if alignment mask for tfm is correctly set. 
*/ 799 ret = __test_aead(tfm, enc, template, tcount, true, 800 alignmask + 1); 801 if (ret) 802 return ret; 803 } 804 805 return 0; 806 } 807 808 static int test_cipher(struct crypto_cipher *tfm, int enc, 809 struct cipher_testvec *template, unsigned int tcount) 810 { 811 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 812 unsigned int i, j, k; 813 char *q; 814 const char *e; 815 void *data; 816 char *xbuf[XBUFSIZE]; 817 int ret = -ENOMEM; 818 819 if (testmgr_alloc_buf(xbuf)) 820 goto out_nobuf; 821 822 if (enc == ENCRYPT) 823 e = "encryption"; 824 else 825 e = "decryption"; 826 827 j = 0; 828 for (i = 0; i < tcount; i++) { 829 if (template[i].np) 830 continue; 831 832 j++; 833 834 ret = -EINVAL; 835 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 836 goto out; 837 838 data = xbuf[0]; 839 memcpy(data, template[i].input, template[i].ilen); 840 841 crypto_cipher_clear_flags(tfm, ~0); 842 if (template[i].wk) 843 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 844 845 ret = crypto_cipher_setkey(tfm, template[i].key, 846 template[i].klen); 847 if (!ret == template[i].fail) { 848 printk(KERN_ERR "alg: cipher: setkey failed " 849 "on test %d for %s: flags=%x\n", j, 850 algo, crypto_cipher_get_flags(tfm)); 851 goto out; 852 } else if (ret) 853 continue; 854 855 for (k = 0; k < template[i].ilen; 856 k += crypto_cipher_blocksize(tfm)) { 857 if (enc) 858 crypto_cipher_encrypt_one(tfm, data + k, 859 data + k); 860 else 861 crypto_cipher_decrypt_one(tfm, data + k, 862 data + k); 863 } 864 865 q = data; 866 if (memcmp(q, template[i].result, template[i].rlen)) { 867 printk(KERN_ERR "alg: cipher: Test %d failed " 868 "on %s for %s\n", j, e, algo); 869 hexdump(q, template[i].rlen); 870 ret = -EINVAL; 871 goto out; 872 } 873 } 874 875 ret = 0; 876 877 out: 878 testmgr_free_buf(xbuf); 879 out_nobuf: 880 return ret; 881 } 882 883 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc, 884 struct cipher_testvec *template, unsigned int tcount, 885 const bool diff_dst, const int align_offset) 886 { 887 const char *algo = 888 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm)); 889 unsigned int i, j, k, n, temp; 890 char *q; 891 struct ablkcipher_request *req; 892 struct scatterlist sg[8]; 893 struct scatterlist sgout[8]; 894 const char *e, *d; 895 struct tcrypt_result result; 896 void *data; 897 char iv[MAX_IVLEN]; 898 char *xbuf[XBUFSIZE]; 899 char *xoutbuf[XBUFSIZE]; 900 int ret = -ENOMEM; 901 902 if (testmgr_alloc_buf(xbuf)) 903 goto out_nobuf; 904 905 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 906 goto out_nooutbuf; 907 908 if (diff_dst) 909 d = "-ddst"; 910 else 911 d = ""; 912 913 if (enc == ENCRYPT) 914 e = "encryption"; 915 else 916 e = "decryption"; 917 918 init_completion(&result.completion); 919 920 req = ablkcipher_request_alloc(tfm, GFP_KERNEL); 921 if (!req) { 922 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 923 d, algo); 924 goto out; 925 } 926 927 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 928 tcrypt_complete, &result); 929 930 j = 0; 931 for (i = 0; i < tcount; i++) { 932 if (template[i].iv) 933 memcpy(iv, template[i].iv, MAX_IVLEN); 934 else 935 memset(iv, 0, MAX_IVLEN); 936 937 if (!(template[i].np) || (template[i].also_non_np)) { 938 j++; 939 940 ret = -EINVAL; 941 if (WARN_ON(align_offset + template[i].ilen > 942 PAGE_SIZE)) 943 goto out; 944 945 data = xbuf[0]; 946 data += align_offset; 947 memcpy(data, template[i].input, template[i].ilen); 948 949 crypto_ablkcipher_clear_flags(tfm, ~0); 950 if (template[i].wk) 
951 crypto_ablkcipher_set_flags( 952 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 953 954 ret = crypto_ablkcipher_setkey(tfm, template[i].key, 955 template[i].klen); 956 if (!ret == template[i].fail) { 957 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 958 d, j, algo, 959 crypto_ablkcipher_get_flags(tfm)); 960 goto out; 961 } else if (ret) 962 continue; 963 964 sg_init_one(&sg[0], data, template[i].ilen); 965 if (diff_dst) { 966 data = xoutbuf[0]; 967 data += align_offset; 968 sg_init_one(&sgout[0], data, template[i].ilen); 969 } 970 971 ablkcipher_request_set_crypt(req, sg, 972 (diff_dst) ? sgout : sg, 973 template[i].ilen, iv); 974 ret = enc ? 975 crypto_ablkcipher_encrypt(req) : 976 crypto_ablkcipher_decrypt(req); 977 978 switch (ret) { 979 case 0: 980 break; 981 case -EINPROGRESS: 982 case -EBUSY: 983 ret = wait_for_completion_interruptible( 984 &result.completion); 985 if (!ret && !((ret = result.err))) { 986 INIT_COMPLETION(result.completion); 987 break; 988 } 989 /* fall through */ 990 default: 991 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 992 d, e, j, algo, -ret); 993 goto out; 994 } 995 996 q = data; 997 if (memcmp(q, template[i].result, template[i].rlen)) { 998 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n", 999 d, j, e, algo); 1000 hexdump(q, template[i].rlen); 1001 ret = -EINVAL; 1002 goto out; 1003 } 1004 } 1005 } 1006 1007 j = 0; 1008 for (i = 0; i < tcount; i++) { 1009 /* alignment tests are only done with continuous buffers */ 1010 if (align_offset != 0) 1011 break; 1012 1013 if (template[i].iv) 1014 memcpy(iv, template[i].iv, MAX_IVLEN); 1015 else 1016 memset(iv, 0, MAX_IVLEN); 1017 1018 if (template[i].np) { 1019 j++; 1020 1021 crypto_ablkcipher_clear_flags(tfm, ~0); 1022 if (template[i].wk) 1023 crypto_ablkcipher_set_flags( 1024 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 1025 1026 ret = crypto_ablkcipher_setkey(tfm, template[i].key, 1027 template[i].klen); 1028 if (!ret == template[i].fail) { 1029 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 1030 d, j, algo, 1031 crypto_ablkcipher_get_flags(tfm)); 1032 goto out; 1033 } else if (ret) 1034 continue; 1035 1036 temp = 0; 1037 ret = -EINVAL; 1038 sg_init_table(sg, template[i].np); 1039 if (diff_dst) 1040 sg_init_table(sgout, template[i].np); 1041 for (k = 0; k < template[i].np; k++) { 1042 if (WARN_ON(offset_in_page(IDX[k]) + 1043 template[i].tap[k] > PAGE_SIZE)) 1044 goto out; 1045 1046 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1047 offset_in_page(IDX[k]); 1048 1049 memcpy(q, template[i].input + temp, 1050 template[i].tap[k]); 1051 1052 if (offset_in_page(q) + template[i].tap[k] < 1053 PAGE_SIZE) 1054 q[template[i].tap[k]] = 0; 1055 1056 sg_set_buf(&sg[k], q, template[i].tap[k]); 1057 if (diff_dst) { 1058 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1059 offset_in_page(IDX[k]); 1060 1061 sg_set_buf(&sgout[k], q, 1062 template[i].tap[k]); 1063 1064 memset(q, 0, template[i].tap[k]); 1065 if (offset_in_page(q) + 1066 template[i].tap[k] < PAGE_SIZE) 1067 q[template[i].tap[k]] = 0; 1068 } 1069 1070 temp += template[i].tap[k]; 1071 } 1072 1073 ablkcipher_request_set_crypt(req, sg, 1074 (diff_dst) ? sgout : sg, 1075 template[i].ilen, iv); 1076 1077 ret = enc ? 
1078 crypto_ablkcipher_encrypt(req) : 1079 crypto_ablkcipher_decrypt(req); 1080 1081 switch (ret) { 1082 case 0: 1083 break; 1084 case -EINPROGRESS: 1085 case -EBUSY: 1086 ret = wait_for_completion_interruptible( 1087 &result.completion); 1088 if (!ret && !((ret = result.err))) { 1089 INIT_COMPLETION(result.completion); 1090 break; 1091 } 1092 /* fall through */ 1093 default: 1094 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 1095 d, e, j, algo, -ret); 1096 goto out; 1097 } 1098 1099 temp = 0; 1100 ret = -EINVAL; 1101 for (k = 0; k < template[i].np; k++) { 1102 if (diff_dst) 1103 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1104 offset_in_page(IDX[k]); 1105 else 1106 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1107 offset_in_page(IDX[k]); 1108 1109 if (memcmp(q, template[i].result + temp, 1110 template[i].tap[k])) { 1111 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 1112 d, j, e, k, algo); 1113 hexdump(q, template[i].tap[k]); 1114 goto out; 1115 } 1116 1117 q += template[i].tap[k]; 1118 for (n = 0; offset_in_page(q + n) && q[n]; n++) 1119 ; 1120 if (n) { 1121 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 1122 d, j, e, k, algo, n); 1123 hexdump(q, n); 1124 goto out; 1125 } 1126 temp += template[i].tap[k]; 1127 } 1128 } 1129 } 1130 1131 ret = 0; 1132 1133 out: 1134 ablkcipher_request_free(req); 1135 if (diff_dst) 1136 testmgr_free_buf(xoutbuf); 1137 out_nooutbuf: 1138 testmgr_free_buf(xbuf); 1139 out_nobuf: 1140 return ret; 1141 } 1142 1143 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, 1144 struct cipher_testvec *template, unsigned int tcount) 1145 { 1146 unsigned int alignmask; 1147 int ret; 1148 1149 /* test 'dst == src' case */ 1150 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 1151 if (ret) 1152 return ret; 1153 1154 /* test 'dst != src' case */ 1155 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 1156 if (ret) 1157 return ret; 1158 1159 /* test unaligned buffers, check with one byte offset */ 1160 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 1161 if (ret) 1162 return ret; 1163 1164 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1165 if (alignmask) { 1166 /* Check if alignment mask for tfm is correctly set. 
*/ 1167 ret = __test_skcipher(tfm, enc, template, tcount, true, 1168 alignmask + 1); 1169 if (ret) 1170 return ret; 1171 } 1172 1173 return 0; 1174 } 1175 1176 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, 1177 struct comp_testvec *dtemplate, int ctcount, int dtcount) 1178 { 1179 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 1180 unsigned int i; 1181 char result[COMP_BUF_SIZE]; 1182 int ret; 1183 1184 for (i = 0; i < ctcount; i++) { 1185 int ilen; 1186 unsigned int dlen = COMP_BUF_SIZE; 1187 1188 memset(result, 0, sizeof (result)); 1189 1190 ilen = ctemplate[i].inlen; 1191 ret = crypto_comp_compress(tfm, ctemplate[i].input, 1192 ilen, result, &dlen); 1193 if (ret) { 1194 printk(KERN_ERR "alg: comp: compression failed " 1195 "on test %d for %s: ret=%d\n", i + 1, algo, 1196 -ret); 1197 goto out; 1198 } 1199 1200 if (dlen != ctemplate[i].outlen) { 1201 printk(KERN_ERR "alg: comp: Compression test %d " 1202 "failed for %s: output len = %d\n", i + 1, algo, 1203 dlen); 1204 ret = -EINVAL; 1205 goto out; 1206 } 1207 1208 if (memcmp(result, ctemplate[i].output, dlen)) { 1209 printk(KERN_ERR "alg: comp: Compression test %d " 1210 "failed for %s\n", i + 1, algo); 1211 hexdump(result, dlen); 1212 ret = -EINVAL; 1213 goto out; 1214 } 1215 } 1216 1217 for (i = 0; i < dtcount; i++) { 1218 int ilen; 1219 unsigned int dlen = COMP_BUF_SIZE; 1220 1221 memset(result, 0, sizeof (result)); 1222 1223 ilen = dtemplate[i].inlen; 1224 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 1225 ilen, result, &dlen); 1226 if (ret) { 1227 printk(KERN_ERR "alg: comp: decompression failed " 1228 "on test %d for %s: ret=%d\n", i + 1, algo, 1229 -ret); 1230 goto out; 1231 } 1232 1233 if (dlen != dtemplate[i].outlen) { 1234 printk(KERN_ERR "alg: comp: Decompression test %d " 1235 "failed for %s: output len = %d\n", i + 1, algo, 1236 dlen); 1237 ret = -EINVAL; 1238 goto out; 1239 } 1240 1241 if (memcmp(result, dtemplate[i].output, dlen)) { 1242 printk(KERN_ERR "alg: comp: Decompression test %d " 1243 "failed for %s\n", i + 1, algo); 1244 hexdump(result, dlen); 1245 ret = -EINVAL; 1246 goto out; 1247 } 1248 } 1249 1250 ret = 0; 1251 1252 out: 1253 return ret; 1254 } 1255 1256 static int test_pcomp(struct crypto_pcomp *tfm, 1257 struct pcomp_testvec *ctemplate, 1258 struct pcomp_testvec *dtemplate, int ctcount, 1259 int dtcount) 1260 { 1261 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm)); 1262 unsigned int i; 1263 char result[COMP_BUF_SIZE]; 1264 int res; 1265 1266 for (i = 0; i < ctcount; i++) { 1267 struct comp_request req; 1268 unsigned int produced = 0; 1269 1270 res = crypto_compress_setup(tfm, ctemplate[i].params, 1271 ctemplate[i].paramsize); 1272 if (res) { 1273 pr_err("alg: pcomp: compression setup failed on test " 1274 "%d for %s: error=%d\n", i + 1, algo, res); 1275 return res; 1276 } 1277 1278 res = crypto_compress_init(tfm); 1279 if (res) { 1280 pr_err("alg: pcomp: compression init failed on test " 1281 "%d for %s: error=%d\n", i + 1, algo, res); 1282 return res; 1283 } 1284 1285 memset(result, 0, sizeof(result)); 1286 1287 req.next_in = ctemplate[i].input; 1288 req.avail_in = ctemplate[i].inlen / 2; 1289 req.next_out = result; 1290 req.avail_out = ctemplate[i].outlen / 2; 1291 1292 res = crypto_compress_update(tfm, &req); 1293 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1294 pr_err("alg: pcomp: compression update failed on test " 1295 "%d for %s: error=%d\n", i + 1, algo, res); 1296 return res; 1297 } 1298 if (res > 0) 1299 
produced += res; 1300 1301 /* Add remaining input data */ 1302 req.avail_in += (ctemplate[i].inlen + 1) / 2; 1303 1304 res = crypto_compress_update(tfm, &req); 1305 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1306 pr_err("alg: pcomp: compression update failed on test " 1307 "%d for %s: error=%d\n", i + 1, algo, res); 1308 return res; 1309 } 1310 if (res > 0) 1311 produced += res; 1312 1313 /* Provide remaining output space */ 1314 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2; 1315 1316 res = crypto_compress_final(tfm, &req); 1317 if (res < 0) { 1318 pr_err("alg: pcomp: compression final failed on test " 1319 "%d for %s: error=%d\n", i + 1, algo, res); 1320 return res; 1321 } 1322 produced += res; 1323 1324 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) { 1325 pr_err("alg: comp: Compression test %d failed for %s: " 1326 "output len = %d (expected %d)\n", i + 1, algo, 1327 COMP_BUF_SIZE - req.avail_out, 1328 ctemplate[i].outlen); 1329 return -EINVAL; 1330 } 1331 1332 if (produced != ctemplate[i].outlen) { 1333 pr_err("alg: comp: Compression test %d failed for %s: " 1334 "returned len = %u (expected %d)\n", i + 1, 1335 algo, produced, ctemplate[i].outlen); 1336 return -EINVAL; 1337 } 1338 1339 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) { 1340 pr_err("alg: pcomp: Compression test %d failed for " 1341 "%s\n", i + 1, algo); 1342 hexdump(result, ctemplate[i].outlen); 1343 return -EINVAL; 1344 } 1345 } 1346 1347 for (i = 0; i < dtcount; i++) { 1348 struct comp_request req; 1349 unsigned int produced = 0; 1350 1351 res = crypto_decompress_setup(tfm, dtemplate[i].params, 1352 dtemplate[i].paramsize); 1353 if (res) { 1354 pr_err("alg: pcomp: decompression setup failed on " 1355 "test %d for %s: error=%d\n", i + 1, algo, res); 1356 return res; 1357 } 1358 1359 res = crypto_decompress_init(tfm); 1360 if (res) { 1361 pr_err("alg: pcomp: decompression init failed on test " 1362 "%d for %s: error=%d\n", i + 1, algo, res); 1363 return res; 1364 } 1365 1366 memset(result, 0, sizeof(result)); 1367 1368 req.next_in = dtemplate[i].input; 1369 req.avail_in = dtemplate[i].inlen / 2; 1370 req.next_out = result; 1371 req.avail_out = dtemplate[i].outlen / 2; 1372 1373 res = crypto_decompress_update(tfm, &req); 1374 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1375 pr_err("alg: pcomp: decompression update failed on " 1376 "test %d for %s: error=%d\n", i + 1, algo, res); 1377 return res; 1378 } 1379 if (res > 0) 1380 produced += res; 1381 1382 /* Add remaining input data */ 1383 req.avail_in += (dtemplate[i].inlen + 1) / 2; 1384 1385 res = crypto_decompress_update(tfm, &req); 1386 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1387 pr_err("alg: pcomp: decompression update failed on " 1388 "test %d for %s: error=%d\n", i + 1, algo, res); 1389 return res; 1390 } 1391 if (res > 0) 1392 produced += res; 1393 1394 /* Provide remaining output space */ 1395 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2; 1396 1397 res = crypto_decompress_final(tfm, &req); 1398 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1399 pr_err("alg: pcomp: decompression final failed on " 1400 "test %d for %s: error=%d\n", i + 1, algo, res); 1401 return res; 1402 } 1403 if (res > 0) 1404 produced += res; 1405 1406 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) { 1407 pr_err("alg: comp: Decompression test %d failed for " 1408 "%s: output len = %d (expected %d)\n", i + 1, 1409 algo, COMP_BUF_SIZE - req.avail_out, 1410 dtemplate[i].outlen); 1411 return -EINVAL; 1412 } 
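/* the byte count accumulated in 'produced' from the update and final calls must also match the expected decompressed length */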
1413 1414 if (produced != dtemplate[i].outlen) { 1415 pr_err("alg: comp: Decompression test %d failed for " 1416 "%s: returned len = %u (expected %d)\n", i + 1, 1417 algo, produced, dtemplate[i].outlen); 1418 return -EINVAL; 1419 } 1420 1421 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) { 1422 pr_err("alg: pcomp: Decompression test %d failed for " 1423 "%s\n", i + 1, algo); 1424 hexdump(result, dtemplate[i].outlen); 1425 return -EINVAL; 1426 } 1427 } 1428 1429 return 0; 1430 } 1431 1432 1433 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template, 1434 unsigned int tcount) 1435 { 1436 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1437 int err = 0, i, j, seedsize; 1438 u8 *seed; 1439 char result[32]; 1440 1441 seedsize = crypto_rng_seedsize(tfm); 1442 1443 seed = kmalloc(seedsize, GFP_KERNEL); 1444 if (!seed) { 1445 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1446 "for %s\n", algo); 1447 return -ENOMEM; 1448 } 1449 1450 for (i = 0; i < tcount; i++) { 1451 memset(result, 0, 32); 1452 1453 memcpy(seed, template[i].v, template[i].vlen); 1454 memcpy(seed + template[i].vlen, template[i].key, 1455 template[i].klen); 1456 memcpy(seed + template[i].vlen + template[i].klen, 1457 template[i].dt, template[i].dtlen); 1458 1459 err = crypto_rng_reset(tfm, seed, seedsize); 1460 if (err) { 1461 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1462 "for %s\n", algo); 1463 goto out; 1464 } 1465 1466 for (j = 0; j < template[i].loops; j++) { 1467 err = crypto_rng_get_bytes(tfm, result, 1468 template[i].rlen); 1469 if (err != template[i].rlen) { 1470 printk(KERN_ERR "alg: cprng: Failed to obtain " 1471 "the correct amount of random data for " 1472 "%s (requested %d, got %d)\n", algo, 1473 template[i].rlen, err); 1474 goto out; 1475 } 1476 } 1477 1478 err = memcmp(result, template[i].result, 1479 template[i].rlen); 1480 if (err) { 1481 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1482 i, algo); 1483 hexdump(result, template[i].rlen); 1484 err = -EINVAL; 1485 goto out; 1486 } 1487 } 1488 1489 out: 1490 kfree(seed); 1491 return err; 1492 } 1493 1494 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1495 u32 type, u32 mask) 1496 { 1497 struct crypto_aead *tfm; 1498 int err = 0; 1499 1500 tfm = crypto_alloc_aead(driver, type, mask); 1501 if (IS_ERR(tfm)) { 1502 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1503 "%ld\n", driver, PTR_ERR(tfm)); 1504 return PTR_ERR(tfm); 1505 } 1506 1507 if (desc->suite.aead.enc.vecs) { 1508 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1509 desc->suite.aead.enc.count); 1510 if (err) 1511 goto out; 1512 } 1513 1514 if (!err && desc->suite.aead.dec.vecs) 1515 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1516 desc->suite.aead.dec.count); 1517 1518 out: 1519 crypto_free_aead(tfm); 1520 return err; 1521 } 1522 1523 static int alg_test_cipher(const struct alg_test_desc *desc, 1524 const char *driver, u32 type, u32 mask) 1525 { 1526 struct crypto_cipher *tfm; 1527 int err = 0; 1528 1529 tfm = crypto_alloc_cipher(driver, type, mask); 1530 if (IS_ERR(tfm)) { 1531 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1532 "%s: %ld\n", driver, PTR_ERR(tfm)); 1533 return PTR_ERR(tfm); 1534 } 1535 1536 if (desc->suite.cipher.enc.vecs) { 1537 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1538 desc->suite.cipher.enc.count); 1539 if (err) 1540 goto out; 1541 } 1542 1543 if (desc->suite.cipher.dec.vecs) 1544 err = 
test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1545 desc->suite.cipher.dec.count); 1546 1547 out: 1548 crypto_free_cipher(tfm); 1549 return err; 1550 } 1551 1552 static int alg_test_skcipher(const struct alg_test_desc *desc, 1553 const char *driver, u32 type, u32 mask) 1554 { 1555 struct crypto_ablkcipher *tfm; 1556 int err = 0; 1557 1558 tfm = crypto_alloc_ablkcipher(driver, type, mask); 1559 if (IS_ERR(tfm)) { 1560 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1561 "%s: %ld\n", driver, PTR_ERR(tfm)); 1562 return PTR_ERR(tfm); 1563 } 1564 1565 if (desc->suite.cipher.enc.vecs) { 1566 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1567 desc->suite.cipher.enc.count); 1568 if (err) 1569 goto out; 1570 } 1571 1572 if (desc->suite.cipher.dec.vecs) 1573 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1574 desc->suite.cipher.dec.count); 1575 1576 out: 1577 crypto_free_ablkcipher(tfm); 1578 return err; 1579 } 1580 1581 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1582 u32 type, u32 mask) 1583 { 1584 struct crypto_comp *tfm; 1585 int err; 1586 1587 tfm = crypto_alloc_comp(driver, type, mask); 1588 if (IS_ERR(tfm)) { 1589 printk(KERN_ERR "alg: comp: Failed to load transform for %s: " 1590 "%ld\n", driver, PTR_ERR(tfm)); 1591 return PTR_ERR(tfm); 1592 } 1593 1594 err = test_comp(tfm, desc->suite.comp.comp.vecs, 1595 desc->suite.comp.decomp.vecs, 1596 desc->suite.comp.comp.count, 1597 desc->suite.comp.decomp.count); 1598 1599 crypto_free_comp(tfm); 1600 return err; 1601 } 1602 1603 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver, 1604 u32 type, u32 mask) 1605 { 1606 struct crypto_pcomp *tfm; 1607 int err; 1608 1609 tfm = crypto_alloc_pcomp(driver, type, mask); 1610 if (IS_ERR(tfm)) { 1611 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n", 1612 driver, PTR_ERR(tfm)); 1613 return PTR_ERR(tfm); 1614 } 1615 1616 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs, 1617 desc->suite.pcomp.decomp.vecs, 1618 desc->suite.pcomp.comp.count, 1619 desc->suite.pcomp.decomp.count); 1620 1621 crypto_free_pcomp(tfm); 1622 return err; 1623 } 1624 1625 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1626 u32 type, u32 mask) 1627 { 1628 struct crypto_ahash *tfm; 1629 int err; 1630 1631 tfm = crypto_alloc_ahash(driver, type, mask); 1632 if (IS_ERR(tfm)) { 1633 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1634 "%ld\n", driver, PTR_ERR(tfm)); 1635 return PTR_ERR(tfm); 1636 } 1637 1638 err = test_hash(tfm, desc->suite.hash.vecs, 1639 desc->suite.hash.count, true); 1640 if (!err) 1641 err = test_hash(tfm, desc->suite.hash.vecs, 1642 desc->suite.hash.count, false); 1643 1644 crypto_free_ahash(tfm); 1645 return err; 1646 } 1647 1648 static int alg_test_crc32c(const struct alg_test_desc *desc, 1649 const char *driver, u32 type, u32 mask) 1650 { 1651 struct crypto_shash *tfm; 1652 u32 val; 1653 int err; 1654 1655 err = alg_test_hash(desc, driver, type, mask); 1656 if (err) 1657 goto out; 1658 1659 tfm = crypto_alloc_shash(driver, type, mask); 1660 if (IS_ERR(tfm)) { 1661 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1662 "%ld\n", driver, PTR_ERR(tfm)); 1663 err = PTR_ERR(tfm); 1664 goto out; 1665 } 1666 1667 do { 1668 struct { 1669 struct shash_desc shash; 1670 char ctx[crypto_shash_descsize(tfm)]; 1671 } sdesc; 1672 1673 sdesc.shash.tfm = tfm; 1674 sdesc.shash.flags = 0; 1675 1676 *(u32 *)sdesc.ctx = le32_to_cpu(420553207); 1677 err = 
crypto_shash_final(&sdesc.shash, (u8 *)&val); 1678 if (err) { 1679 printk(KERN_ERR "alg: crc32c: Operation failed for " 1680 "%s: %d\n", driver, err); 1681 break; 1682 } 1683 1684 if (val != ~420553207) { 1685 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1686 "%d\n", driver, val); 1687 err = -EINVAL; 1688 } 1689 } while (0); 1690 1691 crypto_free_shash(tfm); 1692 1693 out: 1694 return err; 1695 } 1696 1697 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1698 u32 type, u32 mask) 1699 { 1700 struct crypto_rng *rng; 1701 int err; 1702 1703 rng = crypto_alloc_rng(driver, type, mask); 1704 if (IS_ERR(rng)) { 1705 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1706 "%ld\n", driver, PTR_ERR(rng)); 1707 return PTR_ERR(rng); 1708 } 1709 1710 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1711 1712 crypto_free_rng(rng); 1713 1714 return err; 1715 } 1716 1717 static int alg_test_null(const struct alg_test_desc *desc, 1718 const char *driver, u32 type, u32 mask) 1719 { 1720 return 0; 1721 } 1722 1723 /* Please keep this list sorted by algorithm name. */ 1724 static const struct alg_test_desc alg_test_descs[] = { 1725 { 1726 .alg = "__cbc-cast5-avx", 1727 .test = alg_test_null, 1728 }, { 1729 .alg = "__cbc-cast6-avx", 1730 .test = alg_test_null, 1731 }, { 1732 .alg = "__cbc-serpent-avx", 1733 .test = alg_test_null, 1734 }, { 1735 .alg = "__cbc-serpent-avx2", 1736 .test = alg_test_null, 1737 }, { 1738 .alg = "__cbc-serpent-sse2", 1739 .test = alg_test_null, 1740 }, { 1741 .alg = "__cbc-twofish-avx", 1742 .test = alg_test_null, 1743 }, { 1744 .alg = "__driver-cbc-aes-aesni", 1745 .test = alg_test_null, 1746 .fips_allowed = 1, 1747 }, { 1748 .alg = "__driver-cbc-camellia-aesni", 1749 .test = alg_test_null, 1750 }, { 1751 .alg = "__driver-cbc-camellia-aesni-avx2", 1752 .test = alg_test_null, 1753 }, { 1754 .alg = "__driver-cbc-cast5-avx", 1755 .test = alg_test_null, 1756 }, { 1757 .alg = "__driver-cbc-cast6-avx", 1758 .test = alg_test_null, 1759 }, { 1760 .alg = "__driver-cbc-serpent-avx", 1761 .test = alg_test_null, 1762 }, { 1763 .alg = "__driver-cbc-serpent-avx2", 1764 .test = alg_test_null, 1765 }, { 1766 .alg = "__driver-cbc-serpent-sse2", 1767 .test = alg_test_null, 1768 }, { 1769 .alg = "__driver-cbc-twofish-avx", 1770 .test = alg_test_null, 1771 }, { 1772 .alg = "__driver-ecb-aes-aesni", 1773 .test = alg_test_null, 1774 .fips_allowed = 1, 1775 }, { 1776 .alg = "__driver-ecb-camellia-aesni", 1777 .test = alg_test_null, 1778 }, { 1779 .alg = "__driver-ecb-camellia-aesni-avx2", 1780 .test = alg_test_null, 1781 }, { 1782 .alg = "__driver-ecb-cast5-avx", 1783 .test = alg_test_null, 1784 }, { 1785 .alg = "__driver-ecb-cast6-avx", 1786 .test = alg_test_null, 1787 }, { 1788 .alg = "__driver-ecb-serpent-avx", 1789 .test = alg_test_null, 1790 }, { 1791 .alg = "__driver-ecb-serpent-avx2", 1792 .test = alg_test_null, 1793 }, { 1794 .alg = "__driver-ecb-serpent-sse2", 1795 .test = alg_test_null, 1796 }, { 1797 .alg = "__driver-ecb-twofish-avx", 1798 .test = alg_test_null, 1799 }, { 1800 .alg = "__ghash-pclmulqdqni", 1801 .test = alg_test_null, 1802 .fips_allowed = 1, 1803 }, { 1804 .alg = "ansi_cprng", 1805 .test = alg_test_cprng, 1806 .fips_allowed = 1, 1807 .suite = { 1808 .cprng = { 1809 .vecs = ansi_cprng_aes_tv_template, 1810 .count = ANSI_CPRNG_AES_TEST_VECTORS 1811 } 1812 } 1813 }, { 1814 .alg = "authenc(hmac(sha1),cbc(aes))", 1815 .test = alg_test_aead, 1816 .fips_allowed = 1, 1817 .suite = { 1818 .aead = { 1819 .enc = { 
1820 .vecs = hmac_sha1_aes_cbc_enc_tv_template, 1821 .count = HMAC_SHA1_AES_CBC_ENC_TEST_VECTORS 1822 } 1823 } 1824 } 1825 }, { 1826 .alg = "authenc(hmac(sha256),cbc(aes))", 1827 .test = alg_test_aead, 1828 .fips_allowed = 1, 1829 .suite = { 1830 .aead = { 1831 .enc = { 1832 .vecs = hmac_sha256_aes_cbc_enc_tv_template, 1833 .count = HMAC_SHA256_AES_CBC_ENC_TEST_VECTORS 1834 } 1835 } 1836 } 1837 }, { 1838 .alg = "authenc(hmac(sha512),cbc(aes))", 1839 .test = alg_test_aead, 1840 .fips_allowed = 1, 1841 .suite = { 1842 .aead = { 1843 .enc = { 1844 .vecs = hmac_sha512_aes_cbc_enc_tv_template, 1845 .count = HMAC_SHA512_AES_CBC_ENC_TEST_VECTORS 1846 } 1847 } 1848 } 1849 }, { 1850 .alg = "cbc(aes)", 1851 .test = alg_test_skcipher, 1852 .fips_allowed = 1, 1853 .suite = { 1854 .cipher = { 1855 .enc = { 1856 .vecs = aes_cbc_enc_tv_template, 1857 .count = AES_CBC_ENC_TEST_VECTORS 1858 }, 1859 .dec = { 1860 .vecs = aes_cbc_dec_tv_template, 1861 .count = AES_CBC_DEC_TEST_VECTORS 1862 } 1863 } 1864 } 1865 }, { 1866 .alg = "cbc(anubis)", 1867 .test = alg_test_skcipher, 1868 .suite = { 1869 .cipher = { 1870 .enc = { 1871 .vecs = anubis_cbc_enc_tv_template, 1872 .count = ANUBIS_CBC_ENC_TEST_VECTORS 1873 }, 1874 .dec = { 1875 .vecs = anubis_cbc_dec_tv_template, 1876 .count = ANUBIS_CBC_DEC_TEST_VECTORS 1877 } 1878 } 1879 } 1880 }, { 1881 .alg = "cbc(blowfish)", 1882 .test = alg_test_skcipher, 1883 .suite = { 1884 .cipher = { 1885 .enc = { 1886 .vecs = bf_cbc_enc_tv_template, 1887 .count = BF_CBC_ENC_TEST_VECTORS 1888 }, 1889 .dec = { 1890 .vecs = bf_cbc_dec_tv_template, 1891 .count = BF_CBC_DEC_TEST_VECTORS 1892 } 1893 } 1894 } 1895 }, { 1896 .alg = "cbc(camellia)", 1897 .test = alg_test_skcipher, 1898 .suite = { 1899 .cipher = { 1900 .enc = { 1901 .vecs = camellia_cbc_enc_tv_template, 1902 .count = CAMELLIA_CBC_ENC_TEST_VECTORS 1903 }, 1904 .dec = { 1905 .vecs = camellia_cbc_dec_tv_template, 1906 .count = CAMELLIA_CBC_DEC_TEST_VECTORS 1907 } 1908 } 1909 } 1910 }, { 1911 .alg = "cbc(cast5)", 1912 .test = alg_test_skcipher, 1913 .suite = { 1914 .cipher = { 1915 .enc = { 1916 .vecs = cast5_cbc_enc_tv_template, 1917 .count = CAST5_CBC_ENC_TEST_VECTORS 1918 }, 1919 .dec = { 1920 .vecs = cast5_cbc_dec_tv_template, 1921 .count = CAST5_CBC_DEC_TEST_VECTORS 1922 } 1923 } 1924 } 1925 }, { 1926 .alg = "cbc(cast6)", 1927 .test = alg_test_skcipher, 1928 .suite = { 1929 .cipher = { 1930 .enc = { 1931 .vecs = cast6_cbc_enc_tv_template, 1932 .count = CAST6_CBC_ENC_TEST_VECTORS 1933 }, 1934 .dec = { 1935 .vecs = cast6_cbc_dec_tv_template, 1936 .count = CAST6_CBC_DEC_TEST_VECTORS 1937 } 1938 } 1939 } 1940 }, { 1941 .alg = "cbc(des)", 1942 .test = alg_test_skcipher, 1943 .suite = { 1944 .cipher = { 1945 .enc = { 1946 .vecs = des_cbc_enc_tv_template, 1947 .count = DES_CBC_ENC_TEST_VECTORS 1948 }, 1949 .dec = { 1950 .vecs = des_cbc_dec_tv_template, 1951 .count = DES_CBC_DEC_TEST_VECTORS 1952 } 1953 } 1954 } 1955 }, { 1956 .alg = "cbc(des3_ede)", 1957 .test = alg_test_skcipher, 1958 .fips_allowed = 1, 1959 .suite = { 1960 .cipher = { 1961 .enc = { 1962 .vecs = des3_ede_cbc_enc_tv_template, 1963 .count = DES3_EDE_CBC_ENC_TEST_VECTORS 1964 }, 1965 .dec = { 1966 .vecs = des3_ede_cbc_dec_tv_template, 1967 .count = DES3_EDE_CBC_DEC_TEST_VECTORS 1968 } 1969 } 1970 } 1971 }, { 1972 .alg = "cbc(serpent)", 1973 .test = alg_test_skcipher, 1974 .suite = { 1975 .cipher = { 1976 .enc = { 1977 .vecs = serpent_cbc_enc_tv_template, 1978 .count = SERPENT_CBC_ENC_TEST_VECTORS 1979 }, 1980 .dec = { 1981 .vecs = serpent_cbc_dec_tv_template, 1982 
.count = SERPENT_CBC_DEC_TEST_VECTORS 1983 } 1984 } 1985 } 1986 }, { 1987 .alg = "cbc(twofish)", 1988 .test = alg_test_skcipher, 1989 .suite = { 1990 .cipher = { 1991 .enc = { 1992 .vecs = tf_cbc_enc_tv_template, 1993 .count = TF_CBC_ENC_TEST_VECTORS 1994 }, 1995 .dec = { 1996 .vecs = tf_cbc_dec_tv_template, 1997 .count = TF_CBC_DEC_TEST_VECTORS 1998 } 1999 } 2000 } 2001 }, { 2002 .alg = "ccm(aes)", 2003 .test = alg_test_aead, 2004 .fips_allowed = 1, 2005 .suite = { 2006 .aead = { 2007 .enc = { 2008 .vecs = aes_ccm_enc_tv_template, 2009 .count = AES_CCM_ENC_TEST_VECTORS 2010 }, 2011 .dec = { 2012 .vecs = aes_ccm_dec_tv_template, 2013 .count = AES_CCM_DEC_TEST_VECTORS 2014 } 2015 } 2016 } 2017 }, { 2018 .alg = "cmac(aes)", 2019 .test = alg_test_hash, 2020 .suite = { 2021 .hash = { 2022 .vecs = aes_cmac128_tv_template, 2023 .count = CMAC_AES_TEST_VECTORS 2024 } 2025 } 2026 }, { 2027 .alg = "cmac(des3_ede)", 2028 .test = alg_test_hash, 2029 .suite = { 2030 .hash = { 2031 .vecs = des3_ede_cmac64_tv_template, 2032 .count = CMAC_DES3_EDE_TEST_VECTORS 2033 } 2034 } 2035 }, { 2036 .alg = "compress_null", 2037 .test = alg_test_null, 2038 }, { 2039 .alg = "crc32c", 2040 .test = alg_test_crc32c, 2041 .fips_allowed = 1, 2042 .suite = { 2043 .hash = { 2044 .vecs = crc32c_tv_template, 2045 .count = CRC32C_TEST_VECTORS 2046 } 2047 } 2048 }, { 2049 .alg = "crct10dif", 2050 .test = alg_test_hash, 2051 .fips_allowed = 1, 2052 .suite = { 2053 .hash = { 2054 .vecs = crct10dif_tv_template, 2055 .count = CRCT10DIF_TEST_VECTORS 2056 } 2057 } 2058 }, { 2059 .alg = "cryptd(__driver-cbc-aes-aesni)", 2060 .test = alg_test_null, 2061 .fips_allowed = 1, 2062 }, { 2063 .alg = "cryptd(__driver-cbc-camellia-aesni)", 2064 .test = alg_test_null, 2065 }, { 2066 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)", 2067 .test = alg_test_null, 2068 }, { 2069 .alg = "cryptd(__driver-cbc-serpent-avx2)", 2070 .test = alg_test_null, 2071 }, { 2072 .alg = "cryptd(__driver-ecb-aes-aesni)", 2073 .test = alg_test_null, 2074 .fips_allowed = 1, 2075 }, { 2076 .alg = "cryptd(__driver-ecb-camellia-aesni)", 2077 .test = alg_test_null, 2078 }, { 2079 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)", 2080 .test = alg_test_null, 2081 }, { 2082 .alg = "cryptd(__driver-ecb-cast5-avx)", 2083 .test = alg_test_null, 2084 }, { 2085 .alg = "cryptd(__driver-ecb-cast6-avx)", 2086 .test = alg_test_null, 2087 }, { 2088 .alg = "cryptd(__driver-ecb-serpent-avx)", 2089 .test = alg_test_null, 2090 }, { 2091 .alg = "cryptd(__driver-ecb-serpent-avx2)", 2092 .test = alg_test_null, 2093 }, { 2094 .alg = "cryptd(__driver-ecb-serpent-sse2)", 2095 .test = alg_test_null, 2096 }, { 2097 .alg = "cryptd(__driver-ecb-twofish-avx)", 2098 .test = alg_test_null, 2099 }, { 2100 .alg = "cryptd(__driver-gcm-aes-aesni)", 2101 .test = alg_test_null, 2102 .fips_allowed = 1, 2103 }, { 2104 .alg = "cryptd(__ghash-pclmulqdqni)", 2105 .test = alg_test_null, 2106 .fips_allowed = 1, 2107 }, { 2108 .alg = "ctr(aes)", 2109 .test = alg_test_skcipher, 2110 .fips_allowed = 1, 2111 .suite = { 2112 .cipher = { 2113 .enc = { 2114 .vecs = aes_ctr_enc_tv_template, 2115 .count = AES_CTR_ENC_TEST_VECTORS 2116 }, 2117 .dec = { 2118 .vecs = aes_ctr_dec_tv_template, 2119 .count = AES_CTR_DEC_TEST_VECTORS 2120 } 2121 } 2122 } 2123 }, { 2124 .alg = "ctr(blowfish)", 2125 .test = alg_test_skcipher, 2126 .suite = { 2127 .cipher = { 2128 .enc = { 2129 .vecs = bf_ctr_enc_tv_template, 2130 .count = BF_CTR_ENC_TEST_VECTORS 2131 }, 2132 .dec = { 2133 .vecs = bf_ctr_dec_tv_template, 2134 .count = 
					 BF_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_ctr_enc_tv_template,
					.count = CAMELLIA_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_ctr_dec_tv_template,
					.count = CAMELLIA_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_ctr_enc_tv_template,
					.count = CAST5_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_ctr_dec_tv_template,
					.count = CAST5_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_ctr_enc_tv_template,
					.count = CAST6_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_ctr_dec_tv_template,
					.count = CAST6_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_ctr_enc_tv_template,
					.count = DES_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_ctr_dec_tv_template,
					.count = DES_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_ctr_enc_tv_template,
					.count = DES3_EDE_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_ctr_dec_tv_template,
					.count = DES3_EDE_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_ctr_enc_tv_template,
					.count = SERPENT_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_ctr_dec_tv_template,
					.count = SERPENT_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_ctr_enc_tv_template,
					.count = TF_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_ctr_dec_tv_template,
					.count = TF_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cts_mode_enc_tv_template,
					.count = CTS_MODE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cts_mode_dec_tv_template,
					.count = CTS_MODE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "deflate",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = deflate_comp_tv_template,
					.count = DEFLATE_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = deflate_decomp_tv_template,
					.count = DEFLATE_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "digest_null",
		.test = alg_test_null,
	}, {
		.alg = "ecb(__aes-aesni)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_enc_tv_template,
					.count = AES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_dec_tv_template,
					.count = AES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_enc_tv_template,
					.count = ANUBIS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_dec_tv_template,
					.count = ANUBIS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(arc4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = arc4_enc_tv_template,
					.count = ARC4_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = arc4_dec_tv_template,
					.count = ARC4_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_enc_tv_template,
					.count = BF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_dec_tv_template,
					.count = BF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_enc_tv_template,
					.count = CAMELLIA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_dec_tv_template,
					.count = CAMELLIA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_enc_tv_template,
					.count = CAST5_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_dec_tv_template,
					.count = CAST5_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_enc_tv_template,
					.count = CAST6_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_dec_tv_template,
					.count = CAST6_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cipher_null)",
		.test = alg_test_null,
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_enc_tv_template,
					.count = DES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_dec_tv_template,
					.count = DES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_enc_tv_template,
					.count = DES3_EDE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_dec_tv_template,
					.count = DES3_EDE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = 1
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = 1
				}
			}
		}
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = khazad_enc_tv_template,
					.count = KHAZAD_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = khazad_dec_tv_template,
					.count = KHAZAD_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = seed_enc_tv_template,
					.count = SEED_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = seed_dec_tv_template,
					.count = SEED_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_enc_tv_template,
					.count = SERPENT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_dec_tv_template,
					.count = SERPENT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tea_enc_tv_template,
					.count = TEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tea_dec_tv_template,
					.count = TEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tnepres_enc_tv_template,
					.count = TNEPRES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tnepres_dec_tv_template,
					.count = TNEPRES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_enc_tv_template,
					.count = TF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_dec_tv_template,
					.count = TF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xeta_enc_tv_template,
					.count = XETA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xeta_dec_tv_template,
					.count = XETA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xtea_enc_tv_template,
					.count = XTEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xtea_dec_tv_template,
					.count = XTEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_enc_tv_template,
					.count = AES_GCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_dec_tv_template,
					.count = AES_GCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ghash",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = ghash_tv_template,
				.count = GHASH_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(crc32)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = bfin_crc_tv_template,
				.count = BFIN_CRC_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_lrw_enc_tv_template,
					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_lrw_dec_tv_template,
					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_lrw_enc_tv_template,
					.count = CAST6_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_lrw_dec_tv_template,
					.count = CAST6_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_lrw_enc_tv_template,
					.count = SERPENT_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_lrw_dec_tv_template,
					.count = SERPENT_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_lrw_enc_tv_template,
					.count = TF_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_lrw_dec_tv_template,
					.count = TF_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "ofb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ofb_enc_tv_template,
					.count = AES_OFB_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ofb_dec_tv_template,
					.count = AES_OFB_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_rfc3686_enc_tv_template,
					.count = AES_CTR_3686_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_rfc3686_dec_tv_template,
					.count = AES_CTR_3686_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4106(gcm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4106_enc_tv_template,
					.count = AES_GCM_4106_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4106_dec_tv_template,
					.count = AES_GCM_4106_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_rfc4309_enc_tv_template,
					.count = AES_CCM_4309_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_rfc4309_dec_tv_template,
					.count = AES_CCM_4309_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4543(gcm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4543_enc_tv_template,
					.count = AES_GCM_4543_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4543_dec_tv_template,
					.count = AES_GCM_4543_DEC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
			}
		}
	}, {
"tgr192", 2997 .test = alg_test_hash, 2998 .suite = { 2999 .hash = { 3000 .vecs = tgr192_tv_template, 3001 .count = TGR192_TEST_VECTORS 3002 } 3003 } 3004 }, { 3005 .alg = "vmac(aes)", 3006 .test = alg_test_hash, 3007 .suite = { 3008 .hash = { 3009 .vecs = aes_vmac128_tv_template, 3010 .count = VMAC_AES_TEST_VECTORS 3011 } 3012 } 3013 }, { 3014 .alg = "wp256", 3015 .test = alg_test_hash, 3016 .suite = { 3017 .hash = { 3018 .vecs = wp256_tv_template, 3019 .count = WP256_TEST_VECTORS 3020 } 3021 } 3022 }, { 3023 .alg = "wp384", 3024 .test = alg_test_hash, 3025 .suite = { 3026 .hash = { 3027 .vecs = wp384_tv_template, 3028 .count = WP384_TEST_VECTORS 3029 } 3030 } 3031 }, { 3032 .alg = "wp512", 3033 .test = alg_test_hash, 3034 .suite = { 3035 .hash = { 3036 .vecs = wp512_tv_template, 3037 .count = WP512_TEST_VECTORS 3038 } 3039 } 3040 }, { 3041 .alg = "xcbc(aes)", 3042 .test = alg_test_hash, 3043 .suite = { 3044 .hash = { 3045 .vecs = aes_xcbc128_tv_template, 3046 .count = XCBC_AES_TEST_VECTORS 3047 } 3048 } 3049 }, { 3050 .alg = "xts(aes)", 3051 .test = alg_test_skcipher, 3052 .fips_allowed = 1, 3053 .suite = { 3054 .cipher = { 3055 .enc = { 3056 .vecs = aes_xts_enc_tv_template, 3057 .count = AES_XTS_ENC_TEST_VECTORS 3058 }, 3059 .dec = { 3060 .vecs = aes_xts_dec_tv_template, 3061 .count = AES_XTS_DEC_TEST_VECTORS 3062 } 3063 } 3064 } 3065 }, { 3066 .alg = "xts(camellia)", 3067 .test = alg_test_skcipher, 3068 .suite = { 3069 .cipher = { 3070 .enc = { 3071 .vecs = camellia_xts_enc_tv_template, 3072 .count = CAMELLIA_XTS_ENC_TEST_VECTORS 3073 }, 3074 .dec = { 3075 .vecs = camellia_xts_dec_tv_template, 3076 .count = CAMELLIA_XTS_DEC_TEST_VECTORS 3077 } 3078 } 3079 } 3080 }, { 3081 .alg = "xts(cast6)", 3082 .test = alg_test_skcipher, 3083 .suite = { 3084 .cipher = { 3085 .enc = { 3086 .vecs = cast6_xts_enc_tv_template, 3087 .count = CAST6_XTS_ENC_TEST_VECTORS 3088 }, 3089 .dec = { 3090 .vecs = cast6_xts_dec_tv_template, 3091 .count = CAST6_XTS_DEC_TEST_VECTORS 3092 } 3093 } 3094 } 3095 }, { 3096 .alg = "xts(serpent)", 3097 .test = alg_test_skcipher, 3098 .suite = { 3099 .cipher = { 3100 .enc = { 3101 .vecs = serpent_xts_enc_tv_template, 3102 .count = SERPENT_XTS_ENC_TEST_VECTORS 3103 }, 3104 .dec = { 3105 .vecs = serpent_xts_dec_tv_template, 3106 .count = SERPENT_XTS_DEC_TEST_VECTORS 3107 } 3108 } 3109 } 3110 }, { 3111 .alg = "xts(twofish)", 3112 .test = alg_test_skcipher, 3113 .suite = { 3114 .cipher = { 3115 .enc = { 3116 .vecs = tf_xts_enc_tv_template, 3117 .count = TF_XTS_ENC_TEST_VECTORS 3118 }, 3119 .dec = { 3120 .vecs = tf_xts_dec_tv_template, 3121 .count = TF_XTS_DEC_TEST_VECTORS 3122 } 3123 } 3124 } 3125 }, { 3126 .alg = "zlib", 3127 .test = alg_test_pcomp, 3128 .fips_allowed = 1, 3129 .suite = { 3130 .pcomp = { 3131 .comp = { 3132 .vecs = zlib_comp_tv_template, 3133 .count = ZLIB_COMP_TEST_VECTORS 3134 }, 3135 .decomp = { 3136 .vecs = zlib_decomp_tv_template, 3137 .count = ZLIB_DECOMP_TEST_VECTORS 3138 } 3139 } 3140 } 3141 } 3142 }; 3143 3144 static bool alg_test_descs_checked; 3145 3146 static void alg_test_descs_check_order(void) 3147 { 3148 int i; 3149 3150 /* only check once */ 3151 if (alg_test_descs_checked) 3152 return; 3153 3154 alg_test_descs_checked = true; 3155 3156 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) { 3157 int diff = strcmp(alg_test_descs[i - 1].alg, 3158 alg_test_descs[i].alg); 3159 3160 if (WARN_ON(diff > 0)) { 3161 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n", 3162 alg_test_descs[i - 1].alg, 3163 alg_test_descs[i].alg); 3164 

/*
 * Binary search for @alg in the sorted alg_test_descs[] table.
 * Returns the index of the matching entry, or -1 if no test
 * description exists for the algorithm.
 */
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		/*
		 * Bare single-block ciphers have no test entries of their
		 * own; run them against the corresponding "ecb(...)"
		 * vectors instead.
		 */
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* Run the tests registered for the algorithm name and/or the driver name. */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
		       driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);