/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
#include <crypto/drbg.h>
#include <crypto/akcipher.h>

#include "internal.h"

#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else

#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	struct akcipher_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
	} suite;
};

static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
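/*
 * Illustrative note (added commentary, not in the original file): each IDX
 * value above is a byte offset into the xbuf[] page array.  The chunked
 * tests split it as page = IDX[k] >> PAGE_SHIFT and offset =
 * offset_in_page(IDX[k]), so consecutive tap[] segments of a single request
 * land on different, non-contiguous pages.  As a worked example, assuming
 * 4 KiB pages, IDX2 = 32400 selects xbuf page 7 at offset 3728
 * (32400 - 7 * 4096); the WARN_ON() checks below only require that one
 * segment never runs past the end of its page (offset + tap[k] <= PAGE_SIZE).
 */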
16, 1, 150 buf, len, false); 151 } 152 153 static void tcrypt_complete(struct crypto_async_request *req, int err) 154 { 155 struct tcrypt_result *res = req->data; 156 157 if (err == -EINPROGRESS) 158 return; 159 160 res->err = err; 161 complete(&res->completion); 162 } 163 164 static int testmgr_alloc_buf(char *buf[XBUFSIZE]) 165 { 166 int i; 167 168 for (i = 0; i < XBUFSIZE; i++) { 169 buf[i] = (void *)__get_free_page(GFP_KERNEL); 170 if (!buf[i]) 171 goto err_free_buf; 172 } 173 174 return 0; 175 176 err_free_buf: 177 while (i-- > 0) 178 free_page((unsigned long)buf[i]); 179 180 return -ENOMEM; 181 } 182 183 static void testmgr_free_buf(char *buf[XBUFSIZE]) 184 { 185 int i; 186 187 for (i = 0; i < XBUFSIZE; i++) 188 free_page((unsigned long)buf[i]); 189 } 190 191 static int wait_async_op(struct tcrypt_result *tr, int ret) 192 { 193 if (ret == -EINPROGRESS || ret == -EBUSY) { 194 wait_for_completion(&tr->completion); 195 reinit_completion(&tr->completion); 196 ret = tr->err; 197 } 198 return ret; 199 } 200 201 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 202 unsigned int tcount, bool use_digest, 203 const int align_offset) 204 { 205 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 206 unsigned int i, j, k, temp; 207 struct scatterlist sg[8]; 208 char *result; 209 char *key; 210 struct ahash_request *req; 211 struct tcrypt_result tresult; 212 void *hash_buff; 213 char *xbuf[XBUFSIZE]; 214 int ret = -ENOMEM; 215 216 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL); 217 if (!result) 218 return ret; 219 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 220 if (!key) 221 goto out_nobuf; 222 if (testmgr_alloc_buf(xbuf)) 223 goto out_nobuf; 224 225 init_completion(&tresult.completion); 226 227 req = ahash_request_alloc(tfm, GFP_KERNEL); 228 if (!req) { 229 printk(KERN_ERR "alg: hash: Failed to allocate request for " 230 "%s\n", algo); 231 goto out_noreq; 232 } 233 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 234 tcrypt_complete, &tresult); 235 236 j = 0; 237 for (i = 0; i < tcount; i++) { 238 if (template[i].np) 239 continue; 240 241 ret = -EINVAL; 242 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) 243 goto out; 244 245 j++; 246 memset(result, 0, MAX_DIGEST_SIZE); 247 248 hash_buff = xbuf[0]; 249 hash_buff += align_offset; 250 251 memcpy(hash_buff, template[i].plaintext, template[i].psize); 252 sg_init_one(&sg[0], hash_buff, template[i].psize); 253 254 if (template[i].ksize) { 255 crypto_ahash_clear_flags(tfm, ~0); 256 if (template[i].ksize > MAX_KEYLEN) { 257 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 258 j, algo, template[i].ksize, MAX_KEYLEN); 259 ret = -EINVAL; 260 goto out; 261 } 262 memcpy(key, template[i].key, template[i].ksize); 263 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 264 if (ret) { 265 printk(KERN_ERR "alg: hash: setkey failed on " 266 "test %d for %s: ret=%d\n", j, algo, 267 -ret); 268 goto out; 269 } 270 } 271 272 ahash_request_set_crypt(req, sg, result, template[i].psize); 273 if (use_digest) { 274 ret = wait_async_op(&tresult, crypto_ahash_digest(req)); 275 if (ret) { 276 pr_err("alg: hash: digest failed on test %d " 277 "for %s: ret=%d\n", j, algo, -ret); 278 goto out; 279 } 280 } else { 281 ret = wait_async_op(&tresult, crypto_ahash_init(req)); 282 if (ret) { 283 pr_err("alt: hash: init failed on test %d " 284 "for %s: ret=%d\n", j, algo, -ret); 285 goto out; 286 } 287 ret = wait_async_op(&tresult, crypto_ahash_update(req)); 288 if (ret) { 289 pr_err("alt: hash: 
update failed on test %d " 290 "for %s: ret=%d\n", j, algo, -ret); 291 goto out; 292 } 293 ret = wait_async_op(&tresult, crypto_ahash_final(req)); 294 if (ret) { 295 pr_err("alt: hash: final failed on test %d " 296 "for %s: ret=%d\n", j, algo, -ret); 297 goto out; 298 } 299 } 300 301 if (memcmp(result, template[i].digest, 302 crypto_ahash_digestsize(tfm))) { 303 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 304 j, algo); 305 hexdump(result, crypto_ahash_digestsize(tfm)); 306 ret = -EINVAL; 307 goto out; 308 } 309 } 310 311 j = 0; 312 for (i = 0; i < tcount; i++) { 313 /* alignment tests are only done with continuous buffers */ 314 if (align_offset != 0) 315 break; 316 317 if (!template[i].np) 318 continue; 319 320 j++; 321 memset(result, 0, MAX_DIGEST_SIZE); 322 323 temp = 0; 324 sg_init_table(sg, template[i].np); 325 ret = -EINVAL; 326 for (k = 0; k < template[i].np; k++) { 327 if (WARN_ON(offset_in_page(IDX[k]) + 328 template[i].tap[k] > PAGE_SIZE)) 329 goto out; 330 sg_set_buf(&sg[k], 331 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + 332 offset_in_page(IDX[k]), 333 template[i].plaintext + temp, 334 template[i].tap[k]), 335 template[i].tap[k]); 336 temp += template[i].tap[k]; 337 } 338 339 if (template[i].ksize) { 340 if (template[i].ksize > MAX_KEYLEN) { 341 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 342 j, algo, template[i].ksize, MAX_KEYLEN); 343 ret = -EINVAL; 344 goto out; 345 } 346 crypto_ahash_clear_flags(tfm, ~0); 347 memcpy(key, template[i].key, template[i].ksize); 348 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 349 350 if (ret) { 351 printk(KERN_ERR "alg: hash: setkey " 352 "failed on chunking test %d " 353 "for %s: ret=%d\n", j, algo, -ret); 354 goto out; 355 } 356 } 357 358 ahash_request_set_crypt(req, sg, result, template[i].psize); 359 ret = crypto_ahash_digest(req); 360 switch (ret) { 361 case 0: 362 break; 363 case -EINPROGRESS: 364 case -EBUSY: 365 wait_for_completion(&tresult.completion); 366 reinit_completion(&tresult.completion); 367 ret = tresult.err; 368 if (!ret) 369 break; 370 /* fall through */ 371 default: 372 printk(KERN_ERR "alg: hash: digest failed " 373 "on chunking test %d for %s: " 374 "ret=%d\n", j, algo, -ret); 375 goto out; 376 } 377 378 if (memcmp(result, template[i].digest, 379 crypto_ahash_digestsize(tfm))) { 380 printk(KERN_ERR "alg: hash: Chunking test %d " 381 "failed for %s\n", j, algo); 382 hexdump(result, crypto_ahash_digestsize(tfm)); 383 ret = -EINVAL; 384 goto out; 385 } 386 } 387 388 ret = 0; 389 390 out: 391 ahash_request_free(req); 392 out_noreq: 393 testmgr_free_buf(xbuf); 394 out_nobuf: 395 kfree(key); 396 kfree(result); 397 return ret; 398 } 399 400 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 401 unsigned int tcount, bool use_digest) 402 { 403 unsigned int alignmask; 404 int ret; 405 406 ret = __test_hash(tfm, template, tcount, use_digest, 0); 407 if (ret) 408 return ret; 409 410 /* test unaligned buffers, check with one byte offset */ 411 ret = __test_hash(tfm, template, tcount, use_digest, 1); 412 if (ret) 413 return ret; 414 415 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 416 if (alignmask) { 417 /* Check if alignment mask for tfm is correctly set. 
*/ 418 ret = __test_hash(tfm, template, tcount, use_digest, 419 alignmask + 1); 420 if (ret) 421 return ret; 422 } 423 424 return 0; 425 } 426 427 static int __test_aead(struct crypto_aead *tfm, int enc, 428 struct aead_testvec *template, unsigned int tcount, 429 const bool diff_dst, const int align_offset) 430 { 431 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 432 unsigned int i, j, k, n, temp; 433 int ret = -ENOMEM; 434 char *q; 435 char *key; 436 struct aead_request *req; 437 struct scatterlist *sg; 438 struct scatterlist *sgout; 439 const char *e, *d; 440 struct tcrypt_result result; 441 unsigned int authsize, iv_len; 442 void *input; 443 void *output; 444 void *assoc; 445 char *iv; 446 char *xbuf[XBUFSIZE]; 447 char *xoutbuf[XBUFSIZE]; 448 char *axbuf[XBUFSIZE]; 449 450 iv = kzalloc(MAX_IVLEN, GFP_KERNEL); 451 if (!iv) 452 return ret; 453 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 454 if (!key) 455 goto out_noxbuf; 456 if (testmgr_alloc_buf(xbuf)) 457 goto out_noxbuf; 458 if (testmgr_alloc_buf(axbuf)) 459 goto out_noaxbuf; 460 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 461 goto out_nooutbuf; 462 463 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ 464 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL); 465 if (!sg) 466 goto out_nosg; 467 sgout = &sg[16]; 468 469 if (diff_dst) 470 d = "-ddst"; 471 else 472 d = ""; 473 474 if (enc == ENCRYPT) 475 e = "encryption"; 476 else 477 e = "decryption"; 478 479 init_completion(&result.completion); 480 481 req = aead_request_alloc(tfm, GFP_KERNEL); 482 if (!req) { 483 pr_err("alg: aead%s: Failed to allocate request for %s\n", 484 d, algo); 485 goto out; 486 } 487 488 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 489 tcrypt_complete, &result); 490 491 for (i = 0, j = 0; i < tcount; i++) { 492 if (template[i].np) 493 continue; 494 495 j++; 496 497 /* some templates have no input data but they will 498 * touch input 499 */ 500 input = xbuf[0]; 501 input += align_offset; 502 assoc = axbuf[0]; 503 504 ret = -EINVAL; 505 if (WARN_ON(align_offset + template[i].ilen > 506 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 507 goto out; 508 509 memcpy(input, template[i].input, template[i].ilen); 510 memcpy(assoc, template[i].assoc, template[i].alen); 511 iv_len = crypto_aead_ivsize(tfm); 512 if (template[i].iv) 513 memcpy(iv, template[i].iv, iv_len); 514 else 515 memset(iv, 0, iv_len); 516 517 crypto_aead_clear_flags(tfm, ~0); 518 if (template[i].wk) 519 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 520 521 if (template[i].klen > MAX_KEYLEN) { 522 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 523 d, j, algo, template[i].klen, 524 MAX_KEYLEN); 525 ret = -EINVAL; 526 goto out; 527 } 528 memcpy(key, template[i].key, template[i].klen); 529 530 ret = crypto_aead_setkey(tfm, key, template[i].klen); 531 if (!ret == template[i].fail) { 532 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", 533 d, j, algo, crypto_aead_get_flags(tfm)); 534 goto out; 535 } else if (ret) 536 continue; 537 538 authsize = abs(template[i].rlen - template[i].ilen); 539 ret = crypto_aead_setauthsize(tfm, authsize); 540 if (ret) { 541 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", 542 d, authsize, j, algo); 543 goto out; 544 } 545 546 k = !!template[i].alen; 547 sg_init_table(sg, k + 1); 548 sg_set_buf(&sg[0], assoc, template[i].alen); 549 sg_set_buf(&sg[k], input, 550 template[i].ilen + (enc ? 
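/*
 * Illustrative sketch (added commentary, not in the original file): with the
 * current AEAD API the associated data travels in the *same* scatterlist as
 * the text, in front of it, and aead_request_set_ad() only states how many
 * leading bytes are AAD.  The single-buffer case being built here therefore
 * looks like this for encryption (the destination must leave room for the
 * authentication tag):
 *
 *	sg_init_table(sg, 2);
 *	sg_set_buf(&sg[0], assoc, alen);
 *	sg_set_buf(&sg[1], text, ilen + authsize);
 *	aead_request_set_crypt(req, sg, sg, ilen, iv);
 *	aead_request_set_ad(req, alen);
 *	err = crypto_aead_encrypt(req);
 */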
authsize : 0)); 551 output = input; 552 553 if (diff_dst) { 554 sg_init_table(sgout, k + 1); 555 sg_set_buf(&sgout[0], assoc, template[i].alen); 556 557 output = xoutbuf[0]; 558 output += align_offset; 559 sg_set_buf(&sgout[k], output, 560 template[i].rlen + (enc ? 0 : authsize)); 561 } 562 563 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 564 template[i].ilen, iv); 565 566 aead_request_set_ad(req, template[i].alen); 567 568 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 569 570 switch (ret) { 571 case 0: 572 if (template[i].novrfy) { 573 /* verification was supposed to fail */ 574 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 575 d, e, j, algo); 576 /* so really, we got a bad message */ 577 ret = -EBADMSG; 578 goto out; 579 } 580 break; 581 case -EINPROGRESS: 582 case -EBUSY: 583 wait_for_completion(&result.completion); 584 reinit_completion(&result.completion); 585 ret = result.err; 586 if (!ret) 587 break; 588 case -EBADMSG: 589 if (template[i].novrfy) 590 /* verification failure was expected */ 591 continue; 592 /* fall through */ 593 default: 594 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", 595 d, e, j, algo, -ret); 596 goto out; 597 } 598 599 q = output; 600 if (memcmp(q, template[i].result, template[i].rlen)) { 601 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 602 d, j, e, algo); 603 hexdump(q, template[i].rlen); 604 ret = -EINVAL; 605 goto out; 606 } 607 } 608 609 for (i = 0, j = 0; i < tcount; i++) { 610 /* alignment tests are only done with continuous buffers */ 611 if (align_offset != 0) 612 break; 613 614 if (!template[i].np) 615 continue; 616 617 j++; 618 619 if (template[i].iv) 620 memcpy(iv, template[i].iv, MAX_IVLEN); 621 else 622 memset(iv, 0, MAX_IVLEN); 623 624 crypto_aead_clear_flags(tfm, ~0); 625 if (template[i].wk) 626 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 627 if (template[i].klen > MAX_KEYLEN) { 628 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 629 d, j, algo, template[i].klen, MAX_KEYLEN); 630 ret = -EINVAL; 631 goto out; 632 } 633 memcpy(key, template[i].key, template[i].klen); 634 635 ret = crypto_aead_setkey(tfm, key, template[i].klen); 636 if (!ret == template[i].fail) { 637 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 638 d, j, algo, crypto_aead_get_flags(tfm)); 639 goto out; 640 } else if (ret) 641 continue; 642 643 authsize = abs(template[i].rlen - template[i].ilen); 644 645 ret = -EINVAL; 646 sg_init_table(sg, template[i].anp + template[i].np); 647 if (diff_dst) 648 sg_init_table(sgout, template[i].anp + template[i].np); 649 650 ret = -EINVAL; 651 for (k = 0, temp = 0; k < template[i].anp; k++) { 652 if (WARN_ON(offset_in_page(IDX[k]) + 653 template[i].atap[k] > PAGE_SIZE)) 654 goto out; 655 sg_set_buf(&sg[k], 656 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 657 offset_in_page(IDX[k]), 658 template[i].assoc + temp, 659 template[i].atap[k]), 660 template[i].atap[k]); 661 if (diff_dst) 662 sg_set_buf(&sgout[k], 663 axbuf[IDX[k] >> PAGE_SHIFT] + 664 offset_in_page(IDX[k]), 665 template[i].atap[k]); 666 temp += template[i].atap[k]; 667 } 668 669 for (k = 0, temp = 0; k < template[i].np; k++) { 670 if (WARN_ON(offset_in_page(IDX[k]) + 671 template[i].tap[k] > PAGE_SIZE)) 672 goto out; 673 674 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 675 memcpy(q, template[i].input + temp, template[i].tap[k]); 676 sg_set_buf(&sg[template[i].anp + k], 677 q, template[i].tap[k]); 678 679 if (diff_dst) { 680 q = 
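/*
 * Illustrative sketch (added commentary, not in the original file): every
 * test in this file drives the asynchronous crypto API synchronously.  The
 * request callback is tcrypt_complete(), which stores the status in a
 * struct tcrypt_result and fires its completion; the caller either uses
 * wait_async_op() or open-codes the -EINPROGRESS/-EBUSY switch seen above.
 * The minimal pattern is:
 *
 *	struct tcrypt_result tr;
 *
 *	init_completion(&tr.completion);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  tcrypt_complete, &tr);
 *	ret = crypto_aead_encrypt(req);
 *	if (ret == -EINPROGRESS || ret == -EBUSY) {
 *		wait_for_completion(&tr.completion);
 *		reinit_completion(&tr.completion);
 *		ret = tr.err;
 *	}
 */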
xoutbuf[IDX[k] >> PAGE_SHIFT] + 681 offset_in_page(IDX[k]); 682 683 memset(q, 0, template[i].tap[k]); 684 685 sg_set_buf(&sgout[template[i].anp + k], 686 q, template[i].tap[k]); 687 } 688 689 n = template[i].tap[k]; 690 if (k == template[i].np - 1 && enc) 691 n += authsize; 692 if (offset_in_page(q) + n < PAGE_SIZE) 693 q[n] = 0; 694 695 temp += template[i].tap[k]; 696 } 697 698 ret = crypto_aead_setauthsize(tfm, authsize); 699 if (ret) { 700 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 701 d, authsize, j, algo); 702 goto out; 703 } 704 705 if (enc) { 706 if (WARN_ON(sg[template[i].anp + k - 1].offset + 707 sg[template[i].anp + k - 1].length + 708 authsize > PAGE_SIZE)) { 709 ret = -EINVAL; 710 goto out; 711 } 712 713 if (diff_dst) 714 sgout[template[i].anp + k - 1].length += 715 authsize; 716 sg[template[i].anp + k - 1].length += authsize; 717 } 718 719 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 720 template[i].ilen, 721 iv); 722 723 aead_request_set_ad(req, template[i].alen); 724 725 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 726 727 switch (ret) { 728 case 0: 729 if (template[i].novrfy) { 730 /* verification was supposed to fail */ 731 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 732 d, e, j, algo); 733 /* so really, we got a bad message */ 734 ret = -EBADMSG; 735 goto out; 736 } 737 break; 738 case -EINPROGRESS: 739 case -EBUSY: 740 wait_for_completion(&result.completion); 741 reinit_completion(&result.completion); 742 ret = result.err; 743 if (!ret) 744 break; 745 case -EBADMSG: 746 if (template[i].novrfy) 747 /* verification failure was expected */ 748 continue; 749 /* fall through */ 750 default: 751 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", 752 d, e, j, algo, -ret); 753 goto out; 754 } 755 756 ret = -EINVAL; 757 for (k = 0, temp = 0; k < template[i].np; k++) { 758 if (diff_dst) 759 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 760 offset_in_page(IDX[k]); 761 else 762 q = xbuf[IDX[k] >> PAGE_SHIFT] + 763 offset_in_page(IDX[k]); 764 765 n = template[i].tap[k]; 766 if (k == template[i].np - 1) 767 n += enc ? 
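/*
 * Illustrative note (added commentary, not in the original file): the
 * chunked tests also watch for writes *past* the expected output.  Before
 * the operation the byte just after each destination chunk is cleared
 * (q[n] = 0 above); after the operation the bytes following the expected
 * result are scanned and any nonzero byte is reported as "Result buffer
 * corruption", catching drivers that overrun their destination segments:
 *
 *	for (n = 0; offset_in_page(q + n) && q[n]; n++)
 *		;
 */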
authsize : -authsize; 768 769 if (memcmp(q, template[i].result + temp, n)) { 770 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 771 d, j, e, k, algo); 772 hexdump(q, n); 773 goto out; 774 } 775 776 q += n; 777 if (k == template[i].np - 1 && !enc) { 778 if (!diff_dst && 779 memcmp(q, template[i].input + 780 temp + n, authsize)) 781 n = authsize; 782 else 783 n = 0; 784 } else { 785 for (n = 0; offset_in_page(q + n) && q[n]; n++) 786 ; 787 } 788 if (n) { 789 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 790 d, j, e, k, algo, n); 791 hexdump(q, n); 792 goto out; 793 } 794 795 temp += template[i].tap[k]; 796 } 797 } 798 799 ret = 0; 800 801 out: 802 aead_request_free(req); 803 kfree(sg); 804 out_nosg: 805 if (diff_dst) 806 testmgr_free_buf(xoutbuf); 807 out_nooutbuf: 808 testmgr_free_buf(axbuf); 809 out_noaxbuf: 810 testmgr_free_buf(xbuf); 811 out_noxbuf: 812 kfree(key); 813 kfree(iv); 814 return ret; 815 } 816 817 static int test_aead(struct crypto_aead *tfm, int enc, 818 struct aead_testvec *template, unsigned int tcount) 819 { 820 unsigned int alignmask; 821 int ret; 822 823 /* test 'dst == src' case */ 824 ret = __test_aead(tfm, enc, template, tcount, false, 0); 825 if (ret) 826 return ret; 827 828 /* test 'dst != src' case */ 829 ret = __test_aead(tfm, enc, template, tcount, true, 0); 830 if (ret) 831 return ret; 832 833 /* test unaligned buffers, check with one byte offset */ 834 ret = __test_aead(tfm, enc, template, tcount, true, 1); 835 if (ret) 836 return ret; 837 838 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 839 if (alignmask) { 840 /* Check if alignment mask for tfm is correctly set. */ 841 ret = __test_aead(tfm, enc, template, tcount, true, 842 alignmask + 1); 843 if (ret) 844 return ret; 845 } 846 847 return 0; 848 } 849 850 static int test_cipher(struct crypto_cipher *tfm, int enc, 851 struct cipher_testvec *template, unsigned int tcount) 852 { 853 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 854 unsigned int i, j, k; 855 char *q; 856 const char *e; 857 void *data; 858 char *xbuf[XBUFSIZE]; 859 int ret = -ENOMEM; 860 861 if (testmgr_alloc_buf(xbuf)) 862 goto out_nobuf; 863 864 if (enc == ENCRYPT) 865 e = "encryption"; 866 else 867 e = "decryption"; 868 869 j = 0; 870 for (i = 0; i < tcount; i++) { 871 if (template[i].np) 872 continue; 873 874 j++; 875 876 ret = -EINVAL; 877 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 878 goto out; 879 880 data = xbuf[0]; 881 memcpy(data, template[i].input, template[i].ilen); 882 883 crypto_cipher_clear_flags(tfm, ~0); 884 if (template[i].wk) 885 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 886 887 ret = crypto_cipher_setkey(tfm, template[i].key, 888 template[i].klen); 889 if (!ret == template[i].fail) { 890 printk(KERN_ERR "alg: cipher: setkey failed " 891 "on test %d for %s: flags=%x\n", j, 892 algo, crypto_cipher_get_flags(tfm)); 893 goto out; 894 } else if (ret) 895 continue; 896 897 for (k = 0; k < template[i].ilen; 898 k += crypto_cipher_blocksize(tfm)) { 899 if (enc) 900 crypto_cipher_encrypt_one(tfm, data + k, 901 data + k); 902 else 903 crypto_cipher_decrypt_one(tfm, data + k, 904 data + k); 905 } 906 907 q = data; 908 if (memcmp(q, template[i].result, template[i].rlen)) { 909 printk(KERN_ERR "alg: cipher: Test %d failed " 910 "on %s for %s\n", j, e, algo); 911 hexdump(q, template[i].rlen); 912 ret = -EINVAL; 913 goto out; 914 } 915 } 916 917 ret = 0; 918 919 out: 920 testmgr_free_buf(xbuf); 921 out_nobuf: 922 return 
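/*
 * Illustrative sketch (added commentary, not in the original file): the
 * single-block cipher API exercised by test_cipher() has no request object,
 * scatterlist or IV; each call transforms exactly one block, which is why
 * the loop above walks the input in crypto_cipher_blocksize() sized steps.
 * A minimal user, assuming a 16-byte key[] and one 16-byte block in[]/out[],
 * would be:
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_cipher_setkey(tfm, key, 16);
 *	crypto_cipher_encrypt_one(tfm, out, in);
 *	crypto_free_cipher(tfm);
 */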
ret; 923 } 924 925 static int __test_skcipher(struct crypto_skcipher *tfm, int enc, 926 struct cipher_testvec *template, unsigned int tcount, 927 const bool diff_dst, const int align_offset) 928 { 929 const char *algo = 930 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)); 931 unsigned int i, j, k, n, temp; 932 char *q; 933 struct skcipher_request *req; 934 struct scatterlist sg[8]; 935 struct scatterlist sgout[8]; 936 const char *e, *d; 937 struct tcrypt_result result; 938 void *data; 939 char iv[MAX_IVLEN]; 940 char *xbuf[XBUFSIZE]; 941 char *xoutbuf[XBUFSIZE]; 942 int ret = -ENOMEM; 943 unsigned int ivsize = crypto_skcipher_ivsize(tfm); 944 945 if (testmgr_alloc_buf(xbuf)) 946 goto out_nobuf; 947 948 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 949 goto out_nooutbuf; 950 951 if (diff_dst) 952 d = "-ddst"; 953 else 954 d = ""; 955 956 if (enc == ENCRYPT) 957 e = "encryption"; 958 else 959 e = "decryption"; 960 961 init_completion(&result.completion); 962 963 req = skcipher_request_alloc(tfm, GFP_KERNEL); 964 if (!req) { 965 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 966 d, algo); 967 goto out; 968 } 969 970 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 971 tcrypt_complete, &result); 972 973 j = 0; 974 for (i = 0; i < tcount; i++) { 975 if (template[i].np && !template[i].also_non_np) 976 continue; 977 978 if (template[i].iv) 979 memcpy(iv, template[i].iv, ivsize); 980 else 981 memset(iv, 0, MAX_IVLEN); 982 983 j++; 984 ret = -EINVAL; 985 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE)) 986 goto out; 987 988 data = xbuf[0]; 989 data += align_offset; 990 memcpy(data, template[i].input, template[i].ilen); 991 992 crypto_skcipher_clear_flags(tfm, ~0); 993 if (template[i].wk) 994 crypto_skcipher_set_flags(tfm, 995 CRYPTO_TFM_REQ_WEAK_KEY); 996 997 ret = crypto_skcipher_setkey(tfm, template[i].key, 998 template[i].klen); 999 if (!ret == template[i].fail) { 1000 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 1001 d, j, algo, crypto_skcipher_get_flags(tfm)); 1002 goto out; 1003 } else if (ret) 1004 continue; 1005 1006 sg_init_one(&sg[0], data, template[i].ilen); 1007 if (diff_dst) { 1008 data = xoutbuf[0]; 1009 data += align_offset; 1010 sg_init_one(&sgout[0], data, template[i].ilen); 1011 } 1012 1013 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1014 template[i].ilen, iv); 1015 ret = enc ? 
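/*
 * Illustrative sketch (added commentary, not in the original file): a
 * minimal synchronous user of the skcipher API mirrors the setup above.
 * Assuming key, iv and buf of the right sizes for "cbc(aes)" and a
 * struct tcrypt_result tr initialised as in __test_skcipher():
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	struct scatterlist sg;
 *
 *	crypto_skcipher_setkey(tfm, key, 16);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      tcrypt_complete, &tr);
 *	sg_init_one(&sg, buf, 16);
 *	skcipher_request_set_crypt(req, &sg, &sg, 16, iv);
 *	ret = wait_async_op(&tr, crypto_skcipher_encrypt(req));
 */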
crypto_skcipher_encrypt(req) : 1016 crypto_skcipher_decrypt(req); 1017 1018 switch (ret) { 1019 case 0: 1020 break; 1021 case -EINPROGRESS: 1022 case -EBUSY: 1023 wait_for_completion(&result.completion); 1024 reinit_completion(&result.completion); 1025 ret = result.err; 1026 if (!ret) 1027 break; 1028 /* fall through */ 1029 default: 1030 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 1031 d, e, j, algo, -ret); 1032 goto out; 1033 } 1034 1035 q = data; 1036 if (memcmp(q, template[i].result, template[i].rlen)) { 1037 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n", 1038 d, j, e, algo); 1039 hexdump(q, template[i].rlen); 1040 ret = -EINVAL; 1041 goto out; 1042 } 1043 1044 if (template[i].iv_out && 1045 memcmp(iv, template[i].iv_out, 1046 crypto_skcipher_ivsize(tfm))) { 1047 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n", 1048 d, j, e, algo); 1049 hexdump(iv, crypto_skcipher_ivsize(tfm)); 1050 ret = -EINVAL; 1051 goto out; 1052 } 1053 } 1054 1055 j = 0; 1056 for (i = 0; i < tcount; i++) { 1057 /* alignment tests are only done with continuous buffers */ 1058 if (align_offset != 0) 1059 break; 1060 1061 if (!template[i].np) 1062 continue; 1063 1064 if (template[i].iv) 1065 memcpy(iv, template[i].iv, ivsize); 1066 else 1067 memset(iv, 0, MAX_IVLEN); 1068 1069 j++; 1070 crypto_skcipher_clear_flags(tfm, ~0); 1071 if (template[i].wk) 1072 crypto_skcipher_set_flags(tfm, 1073 CRYPTO_TFM_REQ_WEAK_KEY); 1074 1075 ret = crypto_skcipher_setkey(tfm, template[i].key, 1076 template[i].klen); 1077 if (!ret == template[i].fail) { 1078 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 1079 d, j, algo, crypto_skcipher_get_flags(tfm)); 1080 goto out; 1081 } else if (ret) 1082 continue; 1083 1084 temp = 0; 1085 ret = -EINVAL; 1086 sg_init_table(sg, template[i].np); 1087 if (diff_dst) 1088 sg_init_table(sgout, template[i].np); 1089 for (k = 0; k < template[i].np; k++) { 1090 if (WARN_ON(offset_in_page(IDX[k]) + 1091 template[i].tap[k] > PAGE_SIZE)) 1092 goto out; 1093 1094 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 1095 1096 memcpy(q, template[i].input + temp, template[i].tap[k]); 1097 1098 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE) 1099 q[template[i].tap[k]] = 0; 1100 1101 sg_set_buf(&sg[k], q, template[i].tap[k]); 1102 if (diff_dst) { 1103 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1104 offset_in_page(IDX[k]); 1105 1106 sg_set_buf(&sgout[k], q, template[i].tap[k]); 1107 1108 memset(q, 0, template[i].tap[k]); 1109 if (offset_in_page(q) + 1110 template[i].tap[k] < PAGE_SIZE) 1111 q[template[i].tap[k]] = 0; 1112 } 1113 1114 temp += template[i].tap[k]; 1115 } 1116 1117 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1118 template[i].ilen, iv); 1119 1120 ret = enc ? 
crypto_skcipher_encrypt(req) : 1121 crypto_skcipher_decrypt(req); 1122 1123 switch (ret) { 1124 case 0: 1125 break; 1126 case -EINPROGRESS: 1127 case -EBUSY: 1128 wait_for_completion(&result.completion); 1129 reinit_completion(&result.completion); 1130 ret = result.err; 1131 if (!ret) 1132 break; 1133 /* fall through */ 1134 default: 1135 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 1136 d, e, j, algo, -ret); 1137 goto out; 1138 } 1139 1140 temp = 0; 1141 ret = -EINVAL; 1142 for (k = 0; k < template[i].np; k++) { 1143 if (diff_dst) 1144 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1145 offset_in_page(IDX[k]); 1146 else 1147 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1148 offset_in_page(IDX[k]); 1149 1150 if (memcmp(q, template[i].result + temp, 1151 template[i].tap[k])) { 1152 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 1153 d, j, e, k, algo); 1154 hexdump(q, template[i].tap[k]); 1155 goto out; 1156 } 1157 1158 q += template[i].tap[k]; 1159 for (n = 0; offset_in_page(q + n) && q[n]; n++) 1160 ; 1161 if (n) { 1162 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 1163 d, j, e, k, algo, n); 1164 hexdump(q, n); 1165 goto out; 1166 } 1167 temp += template[i].tap[k]; 1168 } 1169 } 1170 1171 ret = 0; 1172 1173 out: 1174 skcipher_request_free(req); 1175 if (diff_dst) 1176 testmgr_free_buf(xoutbuf); 1177 out_nooutbuf: 1178 testmgr_free_buf(xbuf); 1179 out_nobuf: 1180 return ret; 1181 } 1182 1183 static int test_skcipher(struct crypto_skcipher *tfm, int enc, 1184 struct cipher_testvec *template, unsigned int tcount) 1185 { 1186 unsigned int alignmask; 1187 int ret; 1188 1189 /* test 'dst == src' case */ 1190 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 1191 if (ret) 1192 return ret; 1193 1194 /* test 'dst != src' case */ 1195 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 1196 if (ret) 1197 return ret; 1198 1199 /* test unaligned buffers, check with one byte offset */ 1200 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 1201 if (ret) 1202 return ret; 1203 1204 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1205 if (alignmask) { 1206 /* Check if alignment mask for tfm is correctly set. 
*/ 1207 ret = __test_skcipher(tfm, enc, template, tcount, true, 1208 alignmask + 1); 1209 if (ret) 1210 return ret; 1211 } 1212 1213 return 0; 1214 } 1215 1216 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, 1217 struct comp_testvec *dtemplate, int ctcount, int dtcount) 1218 { 1219 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 1220 unsigned int i; 1221 char result[COMP_BUF_SIZE]; 1222 int ret; 1223 1224 for (i = 0; i < ctcount; i++) { 1225 int ilen; 1226 unsigned int dlen = COMP_BUF_SIZE; 1227 1228 memset(result, 0, sizeof (result)); 1229 1230 ilen = ctemplate[i].inlen; 1231 ret = crypto_comp_compress(tfm, ctemplate[i].input, 1232 ilen, result, &dlen); 1233 if (ret) { 1234 printk(KERN_ERR "alg: comp: compression failed " 1235 "on test %d for %s: ret=%d\n", i + 1, algo, 1236 -ret); 1237 goto out; 1238 } 1239 1240 if (dlen != ctemplate[i].outlen) { 1241 printk(KERN_ERR "alg: comp: Compression test %d " 1242 "failed for %s: output len = %d\n", i + 1, algo, 1243 dlen); 1244 ret = -EINVAL; 1245 goto out; 1246 } 1247 1248 if (memcmp(result, ctemplate[i].output, dlen)) { 1249 printk(KERN_ERR "alg: comp: Compression test %d " 1250 "failed for %s\n", i + 1, algo); 1251 hexdump(result, dlen); 1252 ret = -EINVAL; 1253 goto out; 1254 } 1255 } 1256 1257 for (i = 0; i < dtcount; i++) { 1258 int ilen; 1259 unsigned int dlen = COMP_BUF_SIZE; 1260 1261 memset(result, 0, sizeof (result)); 1262 1263 ilen = dtemplate[i].inlen; 1264 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 1265 ilen, result, &dlen); 1266 if (ret) { 1267 printk(KERN_ERR "alg: comp: decompression failed " 1268 "on test %d for %s: ret=%d\n", i + 1, algo, 1269 -ret); 1270 goto out; 1271 } 1272 1273 if (dlen != dtemplate[i].outlen) { 1274 printk(KERN_ERR "alg: comp: Decompression test %d " 1275 "failed for %s: output len = %d\n", i + 1, algo, 1276 dlen); 1277 ret = -EINVAL; 1278 goto out; 1279 } 1280 1281 if (memcmp(result, dtemplate[i].output, dlen)) { 1282 printk(KERN_ERR "alg: comp: Decompression test %d " 1283 "failed for %s\n", i + 1, algo); 1284 hexdump(result, dlen); 1285 ret = -EINVAL; 1286 goto out; 1287 } 1288 } 1289 1290 ret = 0; 1291 1292 out: 1293 return ret; 1294 } 1295 1296 static int test_pcomp(struct crypto_pcomp *tfm, 1297 struct pcomp_testvec *ctemplate, 1298 struct pcomp_testvec *dtemplate, int ctcount, 1299 int dtcount) 1300 { 1301 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm)); 1302 unsigned int i; 1303 char result[COMP_BUF_SIZE]; 1304 int res; 1305 1306 for (i = 0; i < ctcount; i++) { 1307 struct comp_request req; 1308 unsigned int produced = 0; 1309 1310 res = crypto_compress_setup(tfm, ctemplate[i].params, 1311 ctemplate[i].paramsize); 1312 if (res) { 1313 pr_err("alg: pcomp: compression setup failed on test " 1314 "%d for %s: error=%d\n", i + 1, algo, res); 1315 return res; 1316 } 1317 1318 res = crypto_compress_init(tfm); 1319 if (res) { 1320 pr_err("alg: pcomp: compression init failed on test " 1321 "%d for %s: error=%d\n", i + 1, algo, res); 1322 return res; 1323 } 1324 1325 memset(result, 0, sizeof(result)); 1326 1327 req.next_in = ctemplate[i].input; 1328 req.avail_in = ctemplate[i].inlen / 2; 1329 req.next_out = result; 1330 req.avail_out = ctemplate[i].outlen / 2; 1331 1332 res = crypto_compress_update(tfm, &req); 1333 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1334 pr_err("alg: pcomp: compression update failed on test " 1335 "%d for %s: error=%d\n", i + 1, algo, res); 1336 return res; 1337 } 1338 if (res > 0) 1339 
produced += res; 1340 1341 /* Add remaining input data */ 1342 req.avail_in += (ctemplate[i].inlen + 1) / 2; 1343 1344 res = crypto_compress_update(tfm, &req); 1345 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1346 pr_err("alg: pcomp: compression update failed on test " 1347 "%d for %s: error=%d\n", i + 1, algo, res); 1348 return res; 1349 } 1350 if (res > 0) 1351 produced += res; 1352 1353 /* Provide remaining output space */ 1354 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2; 1355 1356 res = crypto_compress_final(tfm, &req); 1357 if (res < 0) { 1358 pr_err("alg: pcomp: compression final failed on test " 1359 "%d for %s: error=%d\n", i + 1, algo, res); 1360 return res; 1361 } 1362 produced += res; 1363 1364 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) { 1365 pr_err("alg: comp: Compression test %d failed for %s: " 1366 "output len = %d (expected %d)\n", i + 1, algo, 1367 COMP_BUF_SIZE - req.avail_out, 1368 ctemplate[i].outlen); 1369 return -EINVAL; 1370 } 1371 1372 if (produced != ctemplate[i].outlen) { 1373 pr_err("alg: comp: Compression test %d failed for %s: " 1374 "returned len = %u (expected %d)\n", i + 1, 1375 algo, produced, ctemplate[i].outlen); 1376 return -EINVAL; 1377 } 1378 1379 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) { 1380 pr_err("alg: pcomp: Compression test %d failed for " 1381 "%s\n", i + 1, algo); 1382 hexdump(result, ctemplate[i].outlen); 1383 return -EINVAL; 1384 } 1385 } 1386 1387 for (i = 0; i < dtcount; i++) { 1388 struct comp_request req; 1389 unsigned int produced = 0; 1390 1391 res = crypto_decompress_setup(tfm, dtemplate[i].params, 1392 dtemplate[i].paramsize); 1393 if (res) { 1394 pr_err("alg: pcomp: decompression setup failed on " 1395 "test %d for %s: error=%d\n", i + 1, algo, res); 1396 return res; 1397 } 1398 1399 res = crypto_decompress_init(tfm); 1400 if (res) { 1401 pr_err("alg: pcomp: decompression init failed on test " 1402 "%d for %s: error=%d\n", i + 1, algo, res); 1403 return res; 1404 } 1405 1406 memset(result, 0, sizeof(result)); 1407 1408 req.next_in = dtemplate[i].input; 1409 req.avail_in = dtemplate[i].inlen / 2; 1410 req.next_out = result; 1411 req.avail_out = dtemplate[i].outlen / 2; 1412 1413 res = crypto_decompress_update(tfm, &req); 1414 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1415 pr_err("alg: pcomp: decompression update failed on " 1416 "test %d for %s: error=%d\n", i + 1, algo, res); 1417 return res; 1418 } 1419 if (res > 0) 1420 produced += res; 1421 1422 /* Add remaining input data */ 1423 req.avail_in += (dtemplate[i].inlen + 1) / 2; 1424 1425 res = crypto_decompress_update(tfm, &req); 1426 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1427 pr_err("alg: pcomp: decompression update failed on " 1428 "test %d for %s: error=%d\n", i + 1, algo, res); 1429 return res; 1430 } 1431 if (res > 0) 1432 produced += res; 1433 1434 /* Provide remaining output space */ 1435 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2; 1436 1437 res = crypto_decompress_final(tfm, &req); 1438 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1439 pr_err("alg: pcomp: decompression final failed on " 1440 "test %d for %s: error=%d\n", i + 1, algo, res); 1441 return res; 1442 } 1443 if (res > 0) 1444 produced += res; 1445 1446 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) { 1447 pr_err("alg: comp: Decompression test %d failed for " 1448 "%s: output len = %d (expected %d)\n", i + 1, 1449 algo, COMP_BUF_SIZE - req.avail_out, 1450 dtemplate[i].outlen); 1451 return -EINVAL; 1452 } 
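/*
 * Illustrative note (added commentary, not in the original file):
 * test_pcomp() deliberately stream-feeds the (de)compressor: half of the
 * input and half of the output space first, then the remainder, then the
 * final() call.  Each update/final may return the number of bytes it
 * produced, so the running total is kept with
 *
 *	if (res > 0)
 *		produced += res;
 *
 * and -EAGAIN is tolerated only once all queued input has been consumed
 * (req.avail_in == 0).  At the end both "produced" and
 * COMP_BUF_SIZE - req.avail_out must equal the expected output length.
 */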
1453 1454 if (produced != dtemplate[i].outlen) { 1455 pr_err("alg: comp: Decompression test %d failed for " 1456 "%s: returned len = %u (expected %d)\n", i + 1, 1457 algo, produced, dtemplate[i].outlen); 1458 return -EINVAL; 1459 } 1460 1461 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) { 1462 pr_err("alg: pcomp: Decompression test %d failed for " 1463 "%s\n", i + 1, algo); 1464 hexdump(result, dtemplate[i].outlen); 1465 return -EINVAL; 1466 } 1467 } 1468 1469 return 0; 1470 } 1471 1472 1473 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template, 1474 unsigned int tcount) 1475 { 1476 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1477 int err = 0, i, j, seedsize; 1478 u8 *seed; 1479 char result[32]; 1480 1481 seedsize = crypto_rng_seedsize(tfm); 1482 1483 seed = kmalloc(seedsize, GFP_KERNEL); 1484 if (!seed) { 1485 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1486 "for %s\n", algo); 1487 return -ENOMEM; 1488 } 1489 1490 for (i = 0; i < tcount; i++) { 1491 memset(result, 0, 32); 1492 1493 memcpy(seed, template[i].v, template[i].vlen); 1494 memcpy(seed + template[i].vlen, template[i].key, 1495 template[i].klen); 1496 memcpy(seed + template[i].vlen + template[i].klen, 1497 template[i].dt, template[i].dtlen); 1498 1499 err = crypto_rng_reset(tfm, seed, seedsize); 1500 if (err) { 1501 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1502 "for %s\n", algo); 1503 goto out; 1504 } 1505 1506 for (j = 0; j < template[i].loops; j++) { 1507 err = crypto_rng_get_bytes(tfm, result, 1508 template[i].rlen); 1509 if (err < 0) { 1510 printk(KERN_ERR "alg: cprng: Failed to obtain " 1511 "the correct amount of random data for " 1512 "%s (requested %d)\n", algo, 1513 template[i].rlen); 1514 goto out; 1515 } 1516 } 1517 1518 err = memcmp(result, template[i].result, 1519 template[i].rlen); 1520 if (err) { 1521 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1522 i, algo); 1523 hexdump(result, template[i].rlen); 1524 err = -EINVAL; 1525 goto out; 1526 } 1527 } 1528 1529 out: 1530 kfree(seed); 1531 return err; 1532 } 1533 1534 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1535 u32 type, u32 mask) 1536 { 1537 struct crypto_aead *tfm; 1538 int err = 0; 1539 1540 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask); 1541 if (IS_ERR(tfm)) { 1542 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1543 "%ld\n", driver, PTR_ERR(tfm)); 1544 return PTR_ERR(tfm); 1545 } 1546 1547 if (desc->suite.aead.enc.vecs) { 1548 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1549 desc->suite.aead.enc.count); 1550 if (err) 1551 goto out; 1552 } 1553 1554 if (!err && desc->suite.aead.dec.vecs) 1555 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1556 desc->suite.aead.dec.count); 1557 1558 out: 1559 crypto_free_aead(tfm); 1560 return err; 1561 } 1562 1563 static int alg_test_cipher(const struct alg_test_desc *desc, 1564 const char *driver, u32 type, u32 mask) 1565 { 1566 struct crypto_cipher *tfm; 1567 int err = 0; 1568 1569 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1570 if (IS_ERR(tfm)) { 1571 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1572 "%s: %ld\n", driver, PTR_ERR(tfm)); 1573 return PTR_ERR(tfm); 1574 } 1575 1576 if (desc->suite.cipher.enc.vecs) { 1577 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1578 desc->suite.cipher.enc.count); 1579 if (err) 1580 goto out; 1581 } 1582 1583 if (desc->suite.cipher.dec.vecs) 1584 
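/*
 * Illustrative note (added commentary, not in the original file):
 * test_cprng() earlier reseeds the ANSI X9.31 generator by packing the test
 * vector's V, key and DT fields back to back into a single buffer of
 * crypto_rng_seedsize() bytes before calling crypto_rng_reset():
 *
 *	seed layout:  [ V (vlen) | key (klen) | DT (dtlen) ]
 *
 * It then calls crypto_rng_get_bytes() template[i].loops times into the same
 * result buffer, so only the data returned by the final call is compared
 * against the expected output.
 */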
err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1585 desc->suite.cipher.dec.count); 1586 1587 out: 1588 crypto_free_cipher(tfm); 1589 return err; 1590 } 1591 1592 static int alg_test_skcipher(const struct alg_test_desc *desc, 1593 const char *driver, u32 type, u32 mask) 1594 { 1595 struct crypto_skcipher *tfm; 1596 int err = 0; 1597 1598 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1599 if (IS_ERR(tfm)) { 1600 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1601 "%s: %ld\n", driver, PTR_ERR(tfm)); 1602 return PTR_ERR(tfm); 1603 } 1604 1605 if (desc->suite.cipher.enc.vecs) { 1606 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1607 desc->suite.cipher.enc.count); 1608 if (err) 1609 goto out; 1610 } 1611 1612 if (desc->suite.cipher.dec.vecs) 1613 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1614 desc->suite.cipher.dec.count); 1615 1616 out: 1617 crypto_free_skcipher(tfm); 1618 return err; 1619 } 1620 1621 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1622 u32 type, u32 mask) 1623 { 1624 struct crypto_comp *tfm; 1625 int err; 1626 1627 tfm = crypto_alloc_comp(driver, type, mask); 1628 if (IS_ERR(tfm)) { 1629 printk(KERN_ERR "alg: comp: Failed to load transform for %s: " 1630 "%ld\n", driver, PTR_ERR(tfm)); 1631 return PTR_ERR(tfm); 1632 } 1633 1634 err = test_comp(tfm, desc->suite.comp.comp.vecs, 1635 desc->suite.comp.decomp.vecs, 1636 desc->suite.comp.comp.count, 1637 desc->suite.comp.decomp.count); 1638 1639 crypto_free_comp(tfm); 1640 return err; 1641 } 1642 1643 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver, 1644 u32 type, u32 mask) 1645 { 1646 struct crypto_pcomp *tfm; 1647 int err; 1648 1649 tfm = crypto_alloc_pcomp(driver, type, mask); 1650 if (IS_ERR(tfm)) { 1651 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n", 1652 driver, PTR_ERR(tfm)); 1653 return PTR_ERR(tfm); 1654 } 1655 1656 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs, 1657 desc->suite.pcomp.decomp.vecs, 1658 desc->suite.pcomp.comp.count, 1659 desc->suite.pcomp.decomp.count); 1660 1661 crypto_free_pcomp(tfm); 1662 return err; 1663 } 1664 1665 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1666 u32 type, u32 mask) 1667 { 1668 struct crypto_ahash *tfm; 1669 int err; 1670 1671 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask); 1672 if (IS_ERR(tfm)) { 1673 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1674 "%ld\n", driver, PTR_ERR(tfm)); 1675 return PTR_ERR(tfm); 1676 } 1677 1678 err = test_hash(tfm, desc->suite.hash.vecs, 1679 desc->suite.hash.count, true); 1680 if (!err) 1681 err = test_hash(tfm, desc->suite.hash.vecs, 1682 desc->suite.hash.count, false); 1683 1684 crypto_free_ahash(tfm); 1685 return err; 1686 } 1687 1688 static int alg_test_crc32c(const struct alg_test_desc *desc, 1689 const char *driver, u32 type, u32 mask) 1690 { 1691 struct crypto_shash *tfm; 1692 u32 val; 1693 int err; 1694 1695 err = alg_test_hash(desc, driver, type, mask); 1696 if (err) 1697 goto out; 1698 1699 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask); 1700 if (IS_ERR(tfm)) { 1701 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1702 "%ld\n", driver, PTR_ERR(tfm)); 1703 err = PTR_ERR(tfm); 1704 goto out; 1705 } 1706 1707 do { 1708 SHASH_DESC_ON_STACK(shash, tfm); 1709 u32 *ctx = (u32 *)shash_desc_ctx(shash); 1710 1711 shash->tfm = tfm; 1712 shash->flags = 0; 1713 1714 *ctx = 
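/*
 * Illustrative note (added commentary, not in the original file): beyond the
 * normal vector run, the crc32c test stores a known intermediate CRC value
 * directly into the shash descriptor context and calls crypto_shash_final().
 * The generic crc32c final step simply emits the bitwise complement of that
 * stored state, so "val" must come back as ~420553207; a driver that lays
 * out its partial state differently, or ignores an imported state, fails
 * here even if its complete digests are correct.
 */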
le32_to_cpu(420553207); 1715 err = crypto_shash_final(shash, (u8 *)&val); 1716 if (err) { 1717 printk(KERN_ERR "alg: crc32c: Operation failed for " 1718 "%s: %d\n", driver, err); 1719 break; 1720 } 1721 1722 if (val != ~420553207) { 1723 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1724 "%d\n", driver, val); 1725 err = -EINVAL; 1726 } 1727 } while (0); 1728 1729 crypto_free_shash(tfm); 1730 1731 out: 1732 return err; 1733 } 1734 1735 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1736 u32 type, u32 mask) 1737 { 1738 struct crypto_rng *rng; 1739 int err; 1740 1741 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask); 1742 if (IS_ERR(rng)) { 1743 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1744 "%ld\n", driver, PTR_ERR(rng)); 1745 return PTR_ERR(rng); 1746 } 1747 1748 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1749 1750 crypto_free_rng(rng); 1751 1752 return err; 1753 } 1754 1755 1756 static int drbg_cavs_test(struct drbg_testvec *test, int pr, 1757 const char *driver, u32 type, u32 mask) 1758 { 1759 int ret = -EAGAIN; 1760 struct crypto_rng *drng; 1761 struct drbg_test_data test_data; 1762 struct drbg_string addtl, pers, testentropy; 1763 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL); 1764 1765 if (!buf) 1766 return -ENOMEM; 1767 1768 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask); 1769 if (IS_ERR(drng)) { 1770 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for " 1771 "%s\n", driver); 1772 kzfree(buf); 1773 return -ENOMEM; 1774 } 1775 1776 test_data.testentropy = &testentropy; 1777 drbg_string_fill(&testentropy, test->entropy, test->entropylen); 1778 drbg_string_fill(&pers, test->pers, test->perslen); 1779 ret = crypto_drbg_reset_test(drng, &pers, &test_data); 1780 if (ret) { 1781 printk(KERN_ERR "alg: drbg: Failed to reset rng\n"); 1782 goto outbuf; 1783 } 1784 1785 drbg_string_fill(&addtl, test->addtla, test->addtllen); 1786 if (pr) { 1787 drbg_string_fill(&testentropy, test->entpra, test->entprlen); 1788 ret = crypto_drbg_get_bytes_addtl_test(drng, 1789 buf, test->expectedlen, &addtl, &test_data); 1790 } else { 1791 ret = crypto_drbg_get_bytes_addtl(drng, 1792 buf, test->expectedlen, &addtl); 1793 } 1794 if (ret < 0) { 1795 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1796 "driver %s\n", driver); 1797 goto outbuf; 1798 } 1799 1800 drbg_string_fill(&addtl, test->addtlb, test->addtllen); 1801 if (pr) { 1802 drbg_string_fill(&testentropy, test->entprb, test->entprlen); 1803 ret = crypto_drbg_get_bytes_addtl_test(drng, 1804 buf, test->expectedlen, &addtl, &test_data); 1805 } else { 1806 ret = crypto_drbg_get_bytes_addtl(drng, 1807 buf, test->expectedlen, &addtl); 1808 } 1809 if (ret < 0) { 1810 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1811 "driver %s\n", driver); 1812 goto outbuf; 1813 } 1814 1815 ret = memcmp(test->expected, buf, test->expectedlen); 1816 1817 outbuf: 1818 crypto_free_rng(drng); 1819 kzfree(buf); 1820 return ret; 1821 } 1822 1823 1824 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver, 1825 u32 type, u32 mask) 1826 { 1827 int err = 0; 1828 int pr = 0; 1829 int i = 0; 1830 struct drbg_testvec *template = desc->suite.drbg.vecs; 1831 unsigned int tcount = desc->suite.drbg.count; 1832 1833 if (0 == memcmp(driver, "drbg_pr_", 8)) 1834 pr = 1; 1835 1836 for (i = 0; i < tcount; i++) { 1837 err = drbg_cavs_test(&template[i], pr, driver, type, mask); 1838 if (err) { 1839 
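/*
 * Illustrative note (added commentary, not in the original file):
 * drbg_cavs_test() above follows the SP 800-90A CAVS flow: instantiate with
 * the test entropy and personalization string, generate twice with the two
 * additional-input strings (prediction-resistant "drbg_pr_*" drivers get
 * fresh test entropy injected before each generate), and compare only the
 * output of the second generate call against test->expected.
 */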
printk(KERN_ERR "alg: drbg: Test %d failed for %s\n", 1840 i, driver); 1841 err = -EINVAL; 1842 break; 1843 } 1844 } 1845 return err; 1846 1847 } 1848 1849 static int do_test_rsa(struct crypto_akcipher *tfm, 1850 struct akcipher_testvec *vecs) 1851 { 1852 struct akcipher_request *req; 1853 void *outbuf_enc = NULL; 1854 void *outbuf_dec = NULL; 1855 struct tcrypt_result result; 1856 unsigned int out_len_max, out_len = 0; 1857 int err = -ENOMEM; 1858 struct scatterlist src, dst, src_tab[2]; 1859 1860 req = akcipher_request_alloc(tfm, GFP_KERNEL); 1861 if (!req) 1862 return err; 1863 1864 init_completion(&result.completion); 1865 1866 if (vecs->public_key_vec) 1867 err = crypto_akcipher_set_pub_key(tfm, vecs->key, 1868 vecs->key_len); 1869 else 1870 err = crypto_akcipher_set_priv_key(tfm, vecs->key, 1871 vecs->key_len); 1872 if (err) 1873 goto free_req; 1874 1875 out_len_max = crypto_akcipher_maxsize(tfm); 1876 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL); 1877 if (!outbuf_enc) 1878 goto free_req; 1879 1880 sg_init_table(src_tab, 2); 1881 sg_set_buf(&src_tab[0], vecs->m, 8); 1882 sg_set_buf(&src_tab[1], vecs->m + 8, vecs->m_size - 8); 1883 sg_init_one(&dst, outbuf_enc, out_len_max); 1884 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size, 1885 out_len_max); 1886 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1887 tcrypt_complete, &result); 1888 1889 /* Run RSA encrypt - c = m^e mod n;*/ 1890 err = wait_async_op(&result, crypto_akcipher_encrypt(req)); 1891 if (err) { 1892 pr_err("alg: rsa: encrypt test failed. err %d\n", err); 1893 goto free_all; 1894 } 1895 if (req->dst_len != vecs->c_size) { 1896 pr_err("alg: rsa: encrypt test failed. Invalid output len\n"); 1897 err = -EINVAL; 1898 goto free_all; 1899 } 1900 /* verify that encrypted message is equal to expected */ 1901 if (memcmp(vecs->c, sg_virt(req->dst), vecs->c_size)) { 1902 pr_err("alg: rsa: encrypt test failed. Invalid output\n"); 1903 err = -EINVAL; 1904 goto free_all; 1905 } 1906 /* Don't invoke decrypt for vectors with public key */ 1907 if (vecs->public_key_vec) { 1908 err = 0; 1909 goto free_all; 1910 } 1911 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL); 1912 if (!outbuf_dec) { 1913 err = -ENOMEM; 1914 goto free_all; 1915 } 1916 sg_init_one(&src, vecs->c, vecs->c_size); 1917 sg_init_one(&dst, outbuf_dec, out_len_max); 1918 init_completion(&result.completion); 1919 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max); 1920 1921 /* Run RSA decrypt - m = c^d mod n;*/ 1922 err = wait_async_op(&result, crypto_akcipher_decrypt(req)); 1923 if (err) { 1924 pr_err("alg: rsa: decrypt test failed. err %d\n", err); 1925 goto free_all; 1926 } 1927 out_len = req->dst_len; 1928 if (out_len != vecs->m_size) { 1929 pr_err("alg: rsa: decrypt test failed. Invalid output len\n"); 1930 err = -EINVAL; 1931 goto free_all; 1932 } 1933 /* verify that decrypted message is equal to the original msg */ 1934 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) { 1935 pr_err("alg: rsa: decrypt test failed. 
Invalid output\n"); 1936 err = -EINVAL; 1937 } 1938 free_all: 1939 kfree(outbuf_dec); 1940 kfree(outbuf_enc); 1941 free_req: 1942 akcipher_request_free(req); 1943 return err; 1944 } 1945 1946 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs, 1947 unsigned int tcount) 1948 { 1949 int ret, i; 1950 1951 for (i = 0; i < tcount; i++) { 1952 ret = do_test_rsa(tfm, vecs++); 1953 if (ret) { 1954 pr_err("alg: rsa: test failed on vector %d, err=%d\n", 1955 i + 1, ret); 1956 return ret; 1957 } 1958 } 1959 return 0; 1960 } 1961 1962 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg, 1963 struct akcipher_testvec *vecs, unsigned int tcount) 1964 { 1965 if (strncmp(alg, "rsa", 3) == 0) 1966 return test_rsa(tfm, vecs, tcount); 1967 1968 return 0; 1969 } 1970 1971 static int alg_test_akcipher(const struct alg_test_desc *desc, 1972 const char *driver, u32 type, u32 mask) 1973 { 1974 struct crypto_akcipher *tfm; 1975 int err = 0; 1976 1977 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1978 if (IS_ERR(tfm)) { 1979 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n", 1980 driver, PTR_ERR(tfm)); 1981 return PTR_ERR(tfm); 1982 } 1983 if (desc->suite.akcipher.vecs) 1984 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs, 1985 desc->suite.akcipher.count); 1986 1987 crypto_free_akcipher(tfm); 1988 return err; 1989 } 1990 1991 static int alg_test_null(const struct alg_test_desc *desc, 1992 const char *driver, u32 type, u32 mask) 1993 { 1994 return 0; 1995 } 1996 1997 /* Please keep this list sorted by algorithm name. */ 1998 static const struct alg_test_desc alg_test_descs[] = { 1999 { 2000 .alg = "__cbc-cast5-avx", 2001 .test = alg_test_null, 2002 }, { 2003 .alg = "__cbc-cast6-avx", 2004 .test = alg_test_null, 2005 }, { 2006 .alg = "__cbc-serpent-avx", 2007 .test = alg_test_null, 2008 }, { 2009 .alg = "__cbc-serpent-avx2", 2010 .test = alg_test_null, 2011 }, { 2012 .alg = "__cbc-serpent-sse2", 2013 .test = alg_test_null, 2014 }, { 2015 .alg = "__cbc-twofish-avx", 2016 .test = alg_test_null, 2017 }, { 2018 .alg = "__driver-cbc-aes-aesni", 2019 .test = alg_test_null, 2020 .fips_allowed = 1, 2021 }, { 2022 .alg = "__driver-cbc-camellia-aesni", 2023 .test = alg_test_null, 2024 }, { 2025 .alg = "__driver-cbc-camellia-aesni-avx2", 2026 .test = alg_test_null, 2027 }, { 2028 .alg = "__driver-cbc-cast5-avx", 2029 .test = alg_test_null, 2030 }, { 2031 .alg = "__driver-cbc-cast6-avx", 2032 .test = alg_test_null, 2033 }, { 2034 .alg = "__driver-cbc-serpent-avx", 2035 .test = alg_test_null, 2036 }, { 2037 .alg = "__driver-cbc-serpent-avx2", 2038 .test = alg_test_null, 2039 }, { 2040 .alg = "__driver-cbc-serpent-sse2", 2041 .test = alg_test_null, 2042 }, { 2043 .alg = "__driver-cbc-twofish-avx", 2044 .test = alg_test_null, 2045 }, { 2046 .alg = "__driver-ecb-aes-aesni", 2047 .test = alg_test_null, 2048 .fips_allowed = 1, 2049 }, { 2050 .alg = "__driver-ecb-camellia-aesni", 2051 .test = alg_test_null, 2052 }, { 2053 .alg = "__driver-ecb-camellia-aesni-avx2", 2054 .test = alg_test_null, 2055 }, { 2056 .alg = "__driver-ecb-cast5-avx", 2057 .test = alg_test_null, 2058 }, { 2059 .alg = "__driver-ecb-cast6-avx", 2060 .test = alg_test_null, 2061 }, { 2062 .alg = "__driver-ecb-serpent-avx", 2063 .test = alg_test_null, 2064 }, { 2065 .alg = "__driver-ecb-serpent-avx2", 2066 .test = alg_test_null, 2067 }, { 2068 .alg = "__driver-ecb-serpent-sse2", 2069 .test = alg_test_null, 2070 }, { 2071 .alg = "__driver-ecb-twofish-avx", 2072 .test = 
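/*
 * Illustrative sketch (added commentary, not in the original file): the RSA
 * test earlier (do_test_rsa()) exercises the akcipher API end to end.  A
 * stripped-down encrypt-only user, assuming key/msg buffers with their
 * lengths and a tcrypt_result "result" set up as in do_test_rsa(), looks
 * like:
 *
 *	struct crypto_akcipher *tfm = crypto_alloc_akcipher("rsa", 0, 0);
 *	struct akcipher_request *req = akcipher_request_alloc(tfm, GFP_KERNEL);
 *	struct scatterlist src, dst;
 *	unsigned int out_len;
 *	void *out;
 *
 *	crypto_akcipher_set_pub_key(tfm, key, key_len);
 *	out_len = crypto_akcipher_maxsize(tfm);
 *	out = kzalloc(out_len, GFP_KERNEL);
 *	sg_init_one(&src, msg, msg_len);
 *	sg_init_one(&dst, out, out_len);
 *	akcipher_request_set_crypt(req, &src, &dst, msg_len, out_len);
 *	err = wait_async_op(&result, crypto_akcipher_encrypt(req));
 */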
alg_test_null, 2073 }, { 2074 .alg = "__driver-gcm-aes-aesni", 2075 .test = alg_test_null, 2076 .fips_allowed = 1, 2077 }, { 2078 .alg = "__ghash-pclmulqdqni", 2079 .test = alg_test_null, 2080 .fips_allowed = 1, 2081 }, { 2082 .alg = "ansi_cprng", 2083 .test = alg_test_cprng, 2084 .suite = { 2085 .cprng = { 2086 .vecs = ansi_cprng_aes_tv_template, 2087 .count = ANSI_CPRNG_AES_TEST_VECTORS 2088 } 2089 } 2090 }, { 2091 .alg = "authenc(hmac(md5),ecb(cipher_null))", 2092 .test = alg_test_aead, 2093 .suite = { 2094 .aead = { 2095 .enc = { 2096 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template, 2097 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS 2098 }, 2099 .dec = { 2100 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template, 2101 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS 2102 } 2103 } 2104 } 2105 }, { 2106 .alg = "authenc(hmac(sha1),cbc(aes))", 2107 .test = alg_test_aead, 2108 .suite = { 2109 .aead = { 2110 .enc = { 2111 .vecs = 2112 hmac_sha1_aes_cbc_enc_tv_temp, 2113 .count = 2114 HMAC_SHA1_AES_CBC_ENC_TEST_VEC 2115 } 2116 } 2117 } 2118 }, { 2119 .alg = "authenc(hmac(sha1),cbc(des))", 2120 .test = alg_test_aead, 2121 .suite = { 2122 .aead = { 2123 .enc = { 2124 .vecs = 2125 hmac_sha1_des_cbc_enc_tv_temp, 2126 .count = 2127 HMAC_SHA1_DES_CBC_ENC_TEST_VEC 2128 } 2129 } 2130 } 2131 }, { 2132 .alg = "authenc(hmac(sha1),cbc(des3_ede))", 2133 .test = alg_test_aead, 2134 .suite = { 2135 .aead = { 2136 .enc = { 2137 .vecs = 2138 hmac_sha1_des3_ede_cbc_enc_tv_temp, 2139 .count = 2140 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC 2141 } 2142 } 2143 } 2144 }, { 2145 .alg = "authenc(hmac(sha1),ecb(cipher_null))", 2146 .test = alg_test_aead, 2147 .suite = { 2148 .aead = { 2149 .enc = { 2150 .vecs = 2151 hmac_sha1_ecb_cipher_null_enc_tv_temp, 2152 .count = 2153 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC 2154 }, 2155 .dec = { 2156 .vecs = 2157 hmac_sha1_ecb_cipher_null_dec_tv_temp, 2158 .count = 2159 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC 2160 } 2161 } 2162 } 2163 }, { 2164 .alg = "authenc(hmac(sha224),cbc(des))", 2165 .test = alg_test_aead, 2166 .suite = { 2167 .aead = { 2168 .enc = { 2169 .vecs = 2170 hmac_sha224_des_cbc_enc_tv_temp, 2171 .count = 2172 HMAC_SHA224_DES_CBC_ENC_TEST_VEC 2173 } 2174 } 2175 } 2176 }, { 2177 .alg = "authenc(hmac(sha224),cbc(des3_ede))", 2178 .test = alg_test_aead, 2179 .suite = { 2180 .aead = { 2181 .enc = { 2182 .vecs = 2183 hmac_sha224_des3_ede_cbc_enc_tv_temp, 2184 .count = 2185 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC 2186 } 2187 } 2188 } 2189 }, { 2190 .alg = "authenc(hmac(sha256),cbc(aes))", 2191 .test = alg_test_aead, 2192 .suite = { 2193 .aead = { 2194 .enc = { 2195 .vecs = 2196 hmac_sha256_aes_cbc_enc_tv_temp, 2197 .count = 2198 HMAC_SHA256_AES_CBC_ENC_TEST_VEC 2199 } 2200 } 2201 } 2202 }, { 2203 .alg = "authenc(hmac(sha256),cbc(des))", 2204 .test = alg_test_aead, 2205 .suite = { 2206 .aead = { 2207 .enc = { 2208 .vecs = 2209 hmac_sha256_des_cbc_enc_tv_temp, 2210 .count = 2211 HMAC_SHA256_DES_CBC_ENC_TEST_VEC 2212 } 2213 } 2214 } 2215 }, { 2216 .alg = "authenc(hmac(sha256),cbc(des3_ede))", 2217 .test = alg_test_aead, 2218 .suite = { 2219 .aead = { 2220 .enc = { 2221 .vecs = 2222 hmac_sha256_des3_ede_cbc_enc_tv_temp, 2223 .count = 2224 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC 2225 } 2226 } 2227 } 2228 }, { 2229 .alg = "authenc(hmac(sha384),cbc(des))", 2230 .test = alg_test_aead, 2231 .suite = { 2232 .aead = { 2233 .enc = { 2234 .vecs = 2235 hmac_sha384_des_cbc_enc_tv_temp, 2236 .count = 2237 HMAC_SHA384_DES_CBC_ENC_TEST_VEC 2238 } 2239 } 2240 } 2241 }, { 2242 .alg = 
"authenc(hmac(sha384),cbc(des3_ede))", 2243 .test = alg_test_aead, 2244 .suite = { 2245 .aead = { 2246 .enc = { 2247 .vecs = 2248 hmac_sha384_des3_ede_cbc_enc_tv_temp, 2249 .count = 2250 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC 2251 } 2252 } 2253 } 2254 }, { 2255 .alg = "authenc(hmac(sha512),cbc(aes))", 2256 .test = alg_test_aead, 2257 .suite = { 2258 .aead = { 2259 .enc = { 2260 .vecs = 2261 hmac_sha512_aes_cbc_enc_tv_temp, 2262 .count = 2263 HMAC_SHA512_AES_CBC_ENC_TEST_VEC 2264 } 2265 } 2266 } 2267 }, { 2268 .alg = "authenc(hmac(sha512),cbc(des))", 2269 .test = alg_test_aead, 2270 .suite = { 2271 .aead = { 2272 .enc = { 2273 .vecs = 2274 hmac_sha512_des_cbc_enc_tv_temp, 2275 .count = 2276 HMAC_SHA512_DES_CBC_ENC_TEST_VEC 2277 } 2278 } 2279 } 2280 }, { 2281 .alg = "authenc(hmac(sha512),cbc(des3_ede))", 2282 .test = alg_test_aead, 2283 .suite = { 2284 .aead = { 2285 .enc = { 2286 .vecs = 2287 hmac_sha512_des3_ede_cbc_enc_tv_temp, 2288 .count = 2289 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC 2290 } 2291 } 2292 } 2293 }, { 2294 .alg = "cbc(aes)", 2295 .test = alg_test_skcipher, 2296 .fips_allowed = 1, 2297 .suite = { 2298 .cipher = { 2299 .enc = { 2300 .vecs = aes_cbc_enc_tv_template, 2301 .count = AES_CBC_ENC_TEST_VECTORS 2302 }, 2303 .dec = { 2304 .vecs = aes_cbc_dec_tv_template, 2305 .count = AES_CBC_DEC_TEST_VECTORS 2306 } 2307 } 2308 } 2309 }, { 2310 .alg = "cbc(anubis)", 2311 .test = alg_test_skcipher, 2312 .suite = { 2313 .cipher = { 2314 .enc = { 2315 .vecs = anubis_cbc_enc_tv_template, 2316 .count = ANUBIS_CBC_ENC_TEST_VECTORS 2317 }, 2318 .dec = { 2319 .vecs = anubis_cbc_dec_tv_template, 2320 .count = ANUBIS_CBC_DEC_TEST_VECTORS 2321 } 2322 } 2323 } 2324 }, { 2325 .alg = "cbc(blowfish)", 2326 .test = alg_test_skcipher, 2327 .suite = { 2328 .cipher = { 2329 .enc = { 2330 .vecs = bf_cbc_enc_tv_template, 2331 .count = BF_CBC_ENC_TEST_VECTORS 2332 }, 2333 .dec = { 2334 .vecs = bf_cbc_dec_tv_template, 2335 .count = BF_CBC_DEC_TEST_VECTORS 2336 } 2337 } 2338 } 2339 }, { 2340 .alg = "cbc(camellia)", 2341 .test = alg_test_skcipher, 2342 .suite = { 2343 .cipher = { 2344 .enc = { 2345 .vecs = camellia_cbc_enc_tv_template, 2346 .count = CAMELLIA_CBC_ENC_TEST_VECTORS 2347 }, 2348 .dec = { 2349 .vecs = camellia_cbc_dec_tv_template, 2350 .count = CAMELLIA_CBC_DEC_TEST_VECTORS 2351 } 2352 } 2353 } 2354 }, { 2355 .alg = "cbc(cast5)", 2356 .test = alg_test_skcipher, 2357 .suite = { 2358 .cipher = { 2359 .enc = { 2360 .vecs = cast5_cbc_enc_tv_template, 2361 .count = CAST5_CBC_ENC_TEST_VECTORS 2362 }, 2363 .dec = { 2364 .vecs = cast5_cbc_dec_tv_template, 2365 .count = CAST5_CBC_DEC_TEST_VECTORS 2366 } 2367 } 2368 } 2369 }, { 2370 .alg = "cbc(cast6)", 2371 .test = alg_test_skcipher, 2372 .suite = { 2373 .cipher = { 2374 .enc = { 2375 .vecs = cast6_cbc_enc_tv_template, 2376 .count = CAST6_CBC_ENC_TEST_VECTORS 2377 }, 2378 .dec = { 2379 .vecs = cast6_cbc_dec_tv_template, 2380 .count = CAST6_CBC_DEC_TEST_VECTORS 2381 } 2382 } 2383 } 2384 }, { 2385 .alg = "cbc(des)", 2386 .test = alg_test_skcipher, 2387 .suite = { 2388 .cipher = { 2389 .enc = { 2390 .vecs = des_cbc_enc_tv_template, 2391 .count = DES_CBC_ENC_TEST_VECTORS 2392 }, 2393 .dec = { 2394 .vecs = des_cbc_dec_tv_template, 2395 .count = DES_CBC_DEC_TEST_VECTORS 2396 } 2397 } 2398 } 2399 }, { 2400 .alg = "cbc(des3_ede)", 2401 .test = alg_test_skcipher, 2402 .fips_allowed = 1, 2403 .suite = { 2404 .cipher = { 2405 .enc = { 2406 .vecs = des3_ede_cbc_enc_tv_template, 2407 .count = DES3_EDE_CBC_ENC_TEST_VECTORS 2408 }, 2409 .dec = { 2410 .vecs = 
des3_ede_cbc_dec_tv_template, 2411 .count = DES3_EDE_CBC_DEC_TEST_VECTORS 2412 } 2413 } 2414 } 2415 }, { 2416 .alg = "cbc(serpent)", 2417 .test = alg_test_skcipher, 2418 .suite = { 2419 .cipher = { 2420 .enc = { 2421 .vecs = serpent_cbc_enc_tv_template, 2422 .count = SERPENT_CBC_ENC_TEST_VECTORS 2423 }, 2424 .dec = { 2425 .vecs = serpent_cbc_dec_tv_template, 2426 .count = SERPENT_CBC_DEC_TEST_VECTORS 2427 } 2428 } 2429 } 2430 }, { 2431 .alg = "cbc(twofish)", 2432 .test = alg_test_skcipher, 2433 .suite = { 2434 .cipher = { 2435 .enc = { 2436 .vecs = tf_cbc_enc_tv_template, 2437 .count = TF_CBC_ENC_TEST_VECTORS 2438 }, 2439 .dec = { 2440 .vecs = tf_cbc_dec_tv_template, 2441 .count = TF_CBC_DEC_TEST_VECTORS 2442 } 2443 } 2444 } 2445 }, { 2446 .alg = "ccm(aes)", 2447 .test = alg_test_aead, 2448 .fips_allowed = 1, 2449 .suite = { 2450 .aead = { 2451 .enc = { 2452 .vecs = aes_ccm_enc_tv_template, 2453 .count = AES_CCM_ENC_TEST_VECTORS 2454 }, 2455 .dec = { 2456 .vecs = aes_ccm_dec_tv_template, 2457 .count = AES_CCM_DEC_TEST_VECTORS 2458 } 2459 } 2460 } 2461 }, { 2462 .alg = "chacha20", 2463 .test = alg_test_skcipher, 2464 .suite = { 2465 .cipher = { 2466 .enc = { 2467 .vecs = chacha20_enc_tv_template, 2468 .count = CHACHA20_ENC_TEST_VECTORS 2469 }, 2470 .dec = { 2471 .vecs = chacha20_enc_tv_template, 2472 .count = CHACHA20_ENC_TEST_VECTORS 2473 }, 2474 } 2475 } 2476 }, { 2477 .alg = "cmac(aes)", 2478 .fips_allowed = 1, 2479 .test = alg_test_hash, 2480 .suite = { 2481 .hash = { 2482 .vecs = aes_cmac128_tv_template, 2483 .count = CMAC_AES_TEST_VECTORS 2484 } 2485 } 2486 }, { 2487 .alg = "cmac(des3_ede)", 2488 .fips_allowed = 1, 2489 .test = alg_test_hash, 2490 .suite = { 2491 .hash = { 2492 .vecs = des3_ede_cmac64_tv_template, 2493 .count = CMAC_DES3_EDE_TEST_VECTORS 2494 } 2495 } 2496 }, { 2497 .alg = "compress_null", 2498 .test = alg_test_null, 2499 }, { 2500 .alg = "crc32", 2501 .test = alg_test_hash, 2502 .suite = { 2503 .hash = { 2504 .vecs = crc32_tv_template, 2505 .count = CRC32_TEST_VECTORS 2506 } 2507 } 2508 }, { 2509 .alg = "crc32c", 2510 .test = alg_test_crc32c, 2511 .fips_allowed = 1, 2512 .suite = { 2513 .hash = { 2514 .vecs = crc32c_tv_template, 2515 .count = CRC32C_TEST_VECTORS 2516 } 2517 } 2518 }, { 2519 .alg = "crct10dif", 2520 .test = alg_test_hash, 2521 .fips_allowed = 1, 2522 .suite = { 2523 .hash = { 2524 .vecs = crct10dif_tv_template, 2525 .count = CRCT10DIF_TEST_VECTORS 2526 } 2527 } 2528 }, { 2529 .alg = "cryptd(__driver-cbc-aes-aesni)", 2530 .test = alg_test_null, 2531 .fips_allowed = 1, 2532 }, { 2533 .alg = "cryptd(__driver-cbc-camellia-aesni)", 2534 .test = alg_test_null, 2535 }, { 2536 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)", 2537 .test = alg_test_null, 2538 }, { 2539 .alg = "cryptd(__driver-cbc-serpent-avx2)", 2540 .test = alg_test_null, 2541 }, { 2542 .alg = "cryptd(__driver-ecb-aes-aesni)", 2543 .test = alg_test_null, 2544 .fips_allowed = 1, 2545 }, { 2546 .alg = "cryptd(__driver-ecb-camellia-aesni)", 2547 .test = alg_test_null, 2548 }, { 2549 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)", 2550 .test = alg_test_null, 2551 }, { 2552 .alg = "cryptd(__driver-ecb-cast5-avx)", 2553 .test = alg_test_null, 2554 }, { 2555 .alg = "cryptd(__driver-ecb-cast6-avx)", 2556 .test = alg_test_null, 2557 }, { 2558 .alg = "cryptd(__driver-ecb-serpent-avx)", 2559 .test = alg_test_null, 2560 }, { 2561 .alg = "cryptd(__driver-ecb-serpent-avx2)", 2562 .test = alg_test_null, 2563 }, { 2564 .alg = "cryptd(__driver-ecb-serpent-sse2)", 2565 .test = alg_test_null, 2566 }, { 2567 
.alg = "cryptd(__driver-ecb-twofish-avx)", 2568 .test = alg_test_null, 2569 }, { 2570 .alg = "cryptd(__driver-gcm-aes-aesni)", 2571 .test = alg_test_null, 2572 .fips_allowed = 1, 2573 }, { 2574 .alg = "cryptd(__ghash-pclmulqdqni)", 2575 .test = alg_test_null, 2576 .fips_allowed = 1, 2577 }, { 2578 .alg = "ctr(aes)", 2579 .test = alg_test_skcipher, 2580 .fips_allowed = 1, 2581 .suite = { 2582 .cipher = { 2583 .enc = { 2584 .vecs = aes_ctr_enc_tv_template, 2585 .count = AES_CTR_ENC_TEST_VECTORS 2586 }, 2587 .dec = { 2588 .vecs = aes_ctr_dec_tv_template, 2589 .count = AES_CTR_DEC_TEST_VECTORS 2590 } 2591 } 2592 } 2593 }, { 2594 .alg = "ctr(blowfish)", 2595 .test = alg_test_skcipher, 2596 .suite = { 2597 .cipher = { 2598 .enc = { 2599 .vecs = bf_ctr_enc_tv_template, 2600 .count = BF_CTR_ENC_TEST_VECTORS 2601 }, 2602 .dec = { 2603 .vecs = bf_ctr_dec_tv_template, 2604 .count = BF_CTR_DEC_TEST_VECTORS 2605 } 2606 } 2607 } 2608 }, { 2609 .alg = "ctr(camellia)", 2610 .test = alg_test_skcipher, 2611 .suite = { 2612 .cipher = { 2613 .enc = { 2614 .vecs = camellia_ctr_enc_tv_template, 2615 .count = CAMELLIA_CTR_ENC_TEST_VECTORS 2616 }, 2617 .dec = { 2618 .vecs = camellia_ctr_dec_tv_template, 2619 .count = CAMELLIA_CTR_DEC_TEST_VECTORS 2620 } 2621 } 2622 } 2623 }, { 2624 .alg = "ctr(cast5)", 2625 .test = alg_test_skcipher, 2626 .suite = { 2627 .cipher = { 2628 .enc = { 2629 .vecs = cast5_ctr_enc_tv_template, 2630 .count = CAST5_CTR_ENC_TEST_VECTORS 2631 }, 2632 .dec = { 2633 .vecs = cast5_ctr_dec_tv_template, 2634 .count = CAST5_CTR_DEC_TEST_VECTORS 2635 } 2636 } 2637 } 2638 }, { 2639 .alg = "ctr(cast6)", 2640 .test = alg_test_skcipher, 2641 .suite = { 2642 .cipher = { 2643 .enc = { 2644 .vecs = cast6_ctr_enc_tv_template, 2645 .count = CAST6_CTR_ENC_TEST_VECTORS 2646 }, 2647 .dec = { 2648 .vecs = cast6_ctr_dec_tv_template, 2649 .count = CAST6_CTR_DEC_TEST_VECTORS 2650 } 2651 } 2652 } 2653 }, { 2654 .alg = "ctr(des)", 2655 .test = alg_test_skcipher, 2656 .suite = { 2657 .cipher = { 2658 .enc = { 2659 .vecs = des_ctr_enc_tv_template, 2660 .count = DES_CTR_ENC_TEST_VECTORS 2661 }, 2662 .dec = { 2663 .vecs = des_ctr_dec_tv_template, 2664 .count = DES_CTR_DEC_TEST_VECTORS 2665 } 2666 } 2667 } 2668 }, { 2669 .alg = "ctr(des3_ede)", 2670 .test = alg_test_skcipher, 2671 .suite = { 2672 .cipher = { 2673 .enc = { 2674 .vecs = des3_ede_ctr_enc_tv_template, 2675 .count = DES3_EDE_CTR_ENC_TEST_VECTORS 2676 }, 2677 .dec = { 2678 .vecs = des3_ede_ctr_dec_tv_template, 2679 .count = DES3_EDE_CTR_DEC_TEST_VECTORS 2680 } 2681 } 2682 } 2683 }, { 2684 .alg = "ctr(serpent)", 2685 .test = alg_test_skcipher, 2686 .suite = { 2687 .cipher = { 2688 .enc = { 2689 .vecs = serpent_ctr_enc_tv_template, 2690 .count = SERPENT_CTR_ENC_TEST_VECTORS 2691 }, 2692 .dec = { 2693 .vecs = serpent_ctr_dec_tv_template, 2694 .count = SERPENT_CTR_DEC_TEST_VECTORS 2695 } 2696 } 2697 } 2698 }, { 2699 .alg = "ctr(twofish)", 2700 .test = alg_test_skcipher, 2701 .suite = { 2702 .cipher = { 2703 .enc = { 2704 .vecs = tf_ctr_enc_tv_template, 2705 .count = TF_CTR_ENC_TEST_VECTORS 2706 }, 2707 .dec = { 2708 .vecs = tf_ctr_dec_tv_template, 2709 .count = TF_CTR_DEC_TEST_VECTORS 2710 } 2711 } 2712 } 2713 }, { 2714 .alg = "cts(cbc(aes))", 2715 .test = alg_test_skcipher, 2716 .suite = { 2717 .cipher = { 2718 .enc = { 2719 .vecs = cts_mode_enc_tv_template, 2720 .count = CTS_MODE_ENC_TEST_VECTORS 2721 }, 2722 .dec = { 2723 .vecs = cts_mode_dec_tv_template, 2724 .count = CTS_MODE_DEC_TEST_VECTORS 2725 } 2726 } 2727 } 2728 }, { 2729 .alg = "deflate", 2730 .test = 
alg_test_comp, 2731 .fips_allowed = 1, 2732 .suite = { 2733 .comp = { 2734 .comp = { 2735 .vecs = deflate_comp_tv_template, 2736 .count = DEFLATE_COMP_TEST_VECTORS 2737 }, 2738 .decomp = { 2739 .vecs = deflate_decomp_tv_template, 2740 .count = DEFLATE_DECOMP_TEST_VECTORS 2741 } 2742 } 2743 } 2744 }, { 2745 .alg = "digest_null", 2746 .test = alg_test_null, 2747 }, { 2748 .alg = "drbg_nopr_ctr_aes128", 2749 .test = alg_test_drbg, 2750 .fips_allowed = 1, 2751 .suite = { 2752 .drbg = { 2753 .vecs = drbg_nopr_ctr_aes128_tv_template, 2754 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template) 2755 } 2756 } 2757 }, { 2758 .alg = "drbg_nopr_ctr_aes192", 2759 .test = alg_test_drbg, 2760 .fips_allowed = 1, 2761 .suite = { 2762 .drbg = { 2763 .vecs = drbg_nopr_ctr_aes192_tv_template, 2764 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template) 2765 } 2766 } 2767 }, { 2768 .alg = "drbg_nopr_ctr_aes256", 2769 .test = alg_test_drbg, 2770 .fips_allowed = 1, 2771 .suite = { 2772 .drbg = { 2773 .vecs = drbg_nopr_ctr_aes256_tv_template, 2774 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template) 2775 } 2776 } 2777 }, { 2778 /* 2779 * There is no need to specifically test the DRBG with every 2780 * backend cipher -- covered by drbg_nopr_hmac_sha256 test 2781 */ 2782 .alg = "drbg_nopr_hmac_sha1", 2783 .fips_allowed = 1, 2784 .test = alg_test_null, 2785 }, { 2786 .alg = "drbg_nopr_hmac_sha256", 2787 .test = alg_test_drbg, 2788 .fips_allowed = 1, 2789 .suite = { 2790 .drbg = { 2791 .vecs = drbg_nopr_hmac_sha256_tv_template, 2792 .count = 2793 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template) 2794 } 2795 } 2796 }, { 2797 /* covered by drbg_nopr_hmac_sha256 test */ 2798 .alg = "drbg_nopr_hmac_sha384", 2799 .fips_allowed = 1, 2800 .test = alg_test_null, 2801 }, { 2802 .alg = "drbg_nopr_hmac_sha512", 2803 .test = alg_test_null, 2804 .fips_allowed = 1, 2805 }, { 2806 .alg = "drbg_nopr_sha1", 2807 .fips_allowed = 1, 2808 .test = alg_test_null, 2809 }, { 2810 .alg = "drbg_nopr_sha256", 2811 .test = alg_test_drbg, 2812 .fips_allowed = 1, 2813 .suite = { 2814 .drbg = { 2815 .vecs = drbg_nopr_sha256_tv_template, 2816 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template) 2817 } 2818 } 2819 }, { 2820 /* covered by drbg_nopr_sha256 test */ 2821 .alg = "drbg_nopr_sha384", 2822 .fips_allowed = 1, 2823 .test = alg_test_null, 2824 }, { 2825 .alg = "drbg_nopr_sha512", 2826 .fips_allowed = 1, 2827 .test = alg_test_null, 2828 }, { 2829 .alg = "drbg_pr_ctr_aes128", 2830 .test = alg_test_drbg, 2831 .fips_allowed = 1, 2832 .suite = { 2833 .drbg = { 2834 .vecs = drbg_pr_ctr_aes128_tv_template, 2835 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template) 2836 } 2837 } 2838 }, { 2839 /* covered by drbg_pr_ctr_aes128 test */ 2840 .alg = "drbg_pr_ctr_aes192", 2841 .fips_allowed = 1, 2842 .test = alg_test_null, 2843 }, { 2844 .alg = "drbg_pr_ctr_aes256", 2845 .fips_allowed = 1, 2846 .test = alg_test_null, 2847 }, { 2848 .alg = "drbg_pr_hmac_sha1", 2849 .fips_allowed = 1, 2850 .test = alg_test_null, 2851 }, { 2852 .alg = "drbg_pr_hmac_sha256", 2853 .test = alg_test_drbg, 2854 .fips_allowed = 1, 2855 .suite = { 2856 .drbg = { 2857 .vecs = drbg_pr_hmac_sha256_tv_template, 2858 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template) 2859 } 2860 } 2861 }, { 2862 /* covered by drbg_pr_hmac_sha256 test */ 2863 .alg = "drbg_pr_hmac_sha384", 2864 .fips_allowed = 1, 2865 .test = alg_test_null, 2866 }, { 2867 .alg = "drbg_pr_hmac_sha512", 2868 .test = alg_test_null, 2869 .fips_allowed = 1, 2870 }, { 2871 .alg = "drbg_pr_sha1", 2872 .fips_allowed = 1, 2873 .test = 
alg_test_null, 2874 }, { 2875 .alg = "drbg_pr_sha256", 2876 .test = alg_test_drbg, 2877 .fips_allowed = 1, 2878 .suite = { 2879 .drbg = { 2880 .vecs = drbg_pr_sha256_tv_template, 2881 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template) 2882 } 2883 } 2884 }, { 2885 /* covered by drbg_pr_sha256 test */ 2886 .alg = "drbg_pr_sha384", 2887 .fips_allowed = 1, 2888 .test = alg_test_null, 2889 }, { 2890 .alg = "drbg_pr_sha512", 2891 .fips_allowed = 1, 2892 .test = alg_test_null, 2893 }, { 2894 .alg = "ecb(__aes-aesni)", 2895 .test = alg_test_null, 2896 .fips_allowed = 1, 2897 }, { 2898 .alg = "ecb(aes)", 2899 .test = alg_test_skcipher, 2900 .fips_allowed = 1, 2901 .suite = { 2902 .cipher = { 2903 .enc = { 2904 .vecs = aes_enc_tv_template, 2905 .count = AES_ENC_TEST_VECTORS 2906 }, 2907 .dec = { 2908 .vecs = aes_dec_tv_template, 2909 .count = AES_DEC_TEST_VECTORS 2910 } 2911 } 2912 } 2913 }, { 2914 .alg = "ecb(anubis)", 2915 .test = alg_test_skcipher, 2916 .suite = { 2917 .cipher = { 2918 .enc = { 2919 .vecs = anubis_enc_tv_template, 2920 .count = ANUBIS_ENC_TEST_VECTORS 2921 }, 2922 .dec = { 2923 .vecs = anubis_dec_tv_template, 2924 .count = ANUBIS_DEC_TEST_VECTORS 2925 } 2926 } 2927 } 2928 }, { 2929 .alg = "ecb(arc4)", 2930 .test = alg_test_skcipher, 2931 .suite = { 2932 .cipher = { 2933 .enc = { 2934 .vecs = arc4_enc_tv_template, 2935 .count = ARC4_ENC_TEST_VECTORS 2936 }, 2937 .dec = { 2938 .vecs = arc4_dec_tv_template, 2939 .count = ARC4_DEC_TEST_VECTORS 2940 } 2941 } 2942 } 2943 }, { 2944 .alg = "ecb(blowfish)", 2945 .test = alg_test_skcipher, 2946 .suite = { 2947 .cipher = { 2948 .enc = { 2949 .vecs = bf_enc_tv_template, 2950 .count = BF_ENC_TEST_VECTORS 2951 }, 2952 .dec = { 2953 .vecs = bf_dec_tv_template, 2954 .count = BF_DEC_TEST_VECTORS 2955 } 2956 } 2957 } 2958 }, { 2959 .alg = "ecb(camellia)", 2960 .test = alg_test_skcipher, 2961 .suite = { 2962 .cipher = { 2963 .enc = { 2964 .vecs = camellia_enc_tv_template, 2965 .count = CAMELLIA_ENC_TEST_VECTORS 2966 }, 2967 .dec = { 2968 .vecs = camellia_dec_tv_template, 2969 .count = CAMELLIA_DEC_TEST_VECTORS 2970 } 2971 } 2972 } 2973 }, { 2974 .alg = "ecb(cast5)", 2975 .test = alg_test_skcipher, 2976 .suite = { 2977 .cipher = { 2978 .enc = { 2979 .vecs = cast5_enc_tv_template, 2980 .count = CAST5_ENC_TEST_VECTORS 2981 }, 2982 .dec = { 2983 .vecs = cast5_dec_tv_template, 2984 .count = CAST5_DEC_TEST_VECTORS 2985 } 2986 } 2987 } 2988 }, { 2989 .alg = "ecb(cast6)", 2990 .test = alg_test_skcipher, 2991 .suite = { 2992 .cipher = { 2993 .enc = { 2994 .vecs = cast6_enc_tv_template, 2995 .count = CAST6_ENC_TEST_VECTORS 2996 }, 2997 .dec = { 2998 .vecs = cast6_dec_tv_template, 2999 .count = CAST6_DEC_TEST_VECTORS 3000 } 3001 } 3002 } 3003 }, { 3004 .alg = "ecb(cipher_null)", 3005 .test = alg_test_null, 3006 }, { 3007 .alg = "ecb(des)", 3008 .test = alg_test_skcipher, 3009 .suite = { 3010 .cipher = { 3011 .enc = { 3012 .vecs = des_enc_tv_template, 3013 .count = DES_ENC_TEST_VECTORS 3014 }, 3015 .dec = { 3016 .vecs = des_dec_tv_template, 3017 .count = DES_DEC_TEST_VECTORS 3018 } 3019 } 3020 } 3021 }, { 3022 .alg = "ecb(des3_ede)", 3023 .test = alg_test_skcipher, 3024 .fips_allowed = 1, 3025 .suite = { 3026 .cipher = { 3027 .enc = { 3028 .vecs = des3_ede_enc_tv_template, 3029 .count = DES3_EDE_ENC_TEST_VECTORS 3030 }, 3031 .dec = { 3032 .vecs = des3_ede_dec_tv_template, 3033 .count = DES3_EDE_DEC_TEST_VECTORS 3034 } 3035 } 3036 } 3037 }, { 3038 .alg = "ecb(fcrypt)", 3039 .test = alg_test_skcipher, 3040 .suite = { 3041 .cipher = { 3042 .enc = { 3043 .vecs = 
fcrypt_pcbc_enc_tv_template, 3044 .count = 1 3045 }, 3046 .dec = { 3047 .vecs = fcrypt_pcbc_dec_tv_template, 3048 .count = 1 3049 } 3050 } 3051 } 3052 }, { 3053 .alg = "ecb(khazad)", 3054 .test = alg_test_skcipher, 3055 .suite = { 3056 .cipher = { 3057 .enc = { 3058 .vecs = khazad_enc_tv_template, 3059 .count = KHAZAD_ENC_TEST_VECTORS 3060 }, 3061 .dec = { 3062 .vecs = khazad_dec_tv_template, 3063 .count = KHAZAD_DEC_TEST_VECTORS 3064 } 3065 } 3066 } 3067 }, { 3068 .alg = "ecb(seed)", 3069 .test = alg_test_skcipher, 3070 .suite = { 3071 .cipher = { 3072 .enc = { 3073 .vecs = seed_enc_tv_template, 3074 .count = SEED_ENC_TEST_VECTORS 3075 }, 3076 .dec = { 3077 .vecs = seed_dec_tv_template, 3078 .count = SEED_DEC_TEST_VECTORS 3079 } 3080 } 3081 } 3082 }, { 3083 .alg = "ecb(serpent)", 3084 .test = alg_test_skcipher, 3085 .suite = { 3086 .cipher = { 3087 .enc = { 3088 .vecs = serpent_enc_tv_template, 3089 .count = SERPENT_ENC_TEST_VECTORS 3090 }, 3091 .dec = { 3092 .vecs = serpent_dec_tv_template, 3093 .count = SERPENT_DEC_TEST_VECTORS 3094 } 3095 } 3096 } 3097 }, { 3098 .alg = "ecb(tea)", 3099 .test = alg_test_skcipher, 3100 .suite = { 3101 .cipher = { 3102 .enc = { 3103 .vecs = tea_enc_tv_template, 3104 .count = TEA_ENC_TEST_VECTORS 3105 }, 3106 .dec = { 3107 .vecs = tea_dec_tv_template, 3108 .count = TEA_DEC_TEST_VECTORS 3109 } 3110 } 3111 } 3112 }, { 3113 .alg = "ecb(tnepres)", 3114 .test = alg_test_skcipher, 3115 .suite = { 3116 .cipher = { 3117 .enc = { 3118 .vecs = tnepres_enc_tv_template, 3119 .count = TNEPRES_ENC_TEST_VECTORS 3120 }, 3121 .dec = { 3122 .vecs = tnepres_dec_tv_template, 3123 .count = TNEPRES_DEC_TEST_VECTORS 3124 } 3125 } 3126 } 3127 }, { 3128 .alg = "ecb(twofish)", 3129 .test = alg_test_skcipher, 3130 .suite = { 3131 .cipher = { 3132 .enc = { 3133 .vecs = tf_enc_tv_template, 3134 .count = TF_ENC_TEST_VECTORS 3135 }, 3136 .dec = { 3137 .vecs = tf_dec_tv_template, 3138 .count = TF_DEC_TEST_VECTORS 3139 } 3140 } 3141 } 3142 }, { 3143 .alg = "ecb(xeta)", 3144 .test = alg_test_skcipher, 3145 .suite = { 3146 .cipher = { 3147 .enc = { 3148 .vecs = xeta_enc_tv_template, 3149 .count = XETA_ENC_TEST_VECTORS 3150 }, 3151 .dec = { 3152 .vecs = xeta_dec_tv_template, 3153 .count = XETA_DEC_TEST_VECTORS 3154 } 3155 } 3156 } 3157 }, { 3158 .alg = "ecb(xtea)", 3159 .test = alg_test_skcipher, 3160 .suite = { 3161 .cipher = { 3162 .enc = { 3163 .vecs = xtea_enc_tv_template, 3164 .count = XTEA_ENC_TEST_VECTORS 3165 }, 3166 .dec = { 3167 .vecs = xtea_dec_tv_template, 3168 .count = XTEA_DEC_TEST_VECTORS 3169 } 3170 } 3171 } 3172 }, { 3173 .alg = "gcm(aes)", 3174 .test = alg_test_aead, 3175 .fips_allowed = 1, 3176 .suite = { 3177 .aead = { 3178 .enc = { 3179 .vecs = aes_gcm_enc_tv_template, 3180 .count = AES_GCM_ENC_TEST_VECTORS 3181 }, 3182 .dec = { 3183 .vecs = aes_gcm_dec_tv_template, 3184 .count = AES_GCM_DEC_TEST_VECTORS 3185 } 3186 } 3187 } 3188 }, { 3189 .alg = "ghash", 3190 .test = alg_test_hash, 3191 .fips_allowed = 1, 3192 .suite = { 3193 .hash = { 3194 .vecs = ghash_tv_template, 3195 .count = GHASH_TEST_VECTORS 3196 } 3197 } 3198 }, { 3199 .alg = "hmac(crc32)", 3200 .test = alg_test_hash, 3201 .suite = { 3202 .hash = { 3203 .vecs = bfin_crc_tv_template, 3204 .count = BFIN_CRC_TEST_VECTORS 3205 } 3206 } 3207 }, { 3208 .alg = "hmac(md5)", 3209 .test = alg_test_hash, 3210 .suite = { 3211 .hash = { 3212 .vecs = hmac_md5_tv_template, 3213 .count = HMAC_MD5_TEST_VECTORS 3214 } 3215 } 3216 }, { 3217 .alg = "hmac(rmd128)", 3218 .test = alg_test_hash, 3219 .suite = { 3220 .hash = { 3221 
.vecs = hmac_rmd128_tv_template, 3222 .count = HMAC_RMD128_TEST_VECTORS 3223 } 3224 } 3225 }, { 3226 .alg = "hmac(rmd160)", 3227 .test = alg_test_hash, 3228 .suite = { 3229 .hash = { 3230 .vecs = hmac_rmd160_tv_template, 3231 .count = HMAC_RMD160_TEST_VECTORS 3232 } 3233 } 3234 }, { 3235 .alg = "hmac(sha1)", 3236 .test = alg_test_hash, 3237 .fips_allowed = 1, 3238 .suite = { 3239 .hash = { 3240 .vecs = hmac_sha1_tv_template, 3241 .count = HMAC_SHA1_TEST_VECTORS 3242 } 3243 } 3244 }, { 3245 .alg = "hmac(sha224)", 3246 .test = alg_test_hash, 3247 .fips_allowed = 1, 3248 .suite = { 3249 .hash = { 3250 .vecs = hmac_sha224_tv_template, 3251 .count = HMAC_SHA224_TEST_VECTORS 3252 } 3253 } 3254 }, { 3255 .alg = "hmac(sha256)", 3256 .test = alg_test_hash, 3257 .fips_allowed = 1, 3258 .suite = { 3259 .hash = { 3260 .vecs = hmac_sha256_tv_template, 3261 .count = HMAC_SHA256_TEST_VECTORS 3262 } 3263 } 3264 }, { 3265 .alg = "hmac(sha384)", 3266 .test = alg_test_hash, 3267 .fips_allowed = 1, 3268 .suite = { 3269 .hash = { 3270 .vecs = hmac_sha384_tv_template, 3271 .count = HMAC_SHA384_TEST_VECTORS 3272 } 3273 } 3274 }, { 3275 .alg = "hmac(sha512)", 3276 .test = alg_test_hash, 3277 .fips_allowed = 1, 3278 .suite = { 3279 .hash = { 3280 .vecs = hmac_sha512_tv_template, 3281 .count = HMAC_SHA512_TEST_VECTORS 3282 } 3283 } 3284 }, { 3285 .alg = "jitterentropy_rng", 3286 .fips_allowed = 1, 3287 .test = alg_test_null, 3288 }, { 3289 .alg = "kw(aes)", 3290 .test = alg_test_skcipher, 3291 .fips_allowed = 1, 3292 .suite = { 3293 .cipher = { 3294 .enc = { 3295 .vecs = aes_kw_enc_tv_template, 3296 .count = ARRAY_SIZE(aes_kw_enc_tv_template) 3297 }, 3298 .dec = { 3299 .vecs = aes_kw_dec_tv_template, 3300 .count = ARRAY_SIZE(aes_kw_dec_tv_template) 3301 } 3302 } 3303 } 3304 }, { 3305 .alg = "lrw(aes)", 3306 .test = alg_test_skcipher, 3307 .suite = { 3308 .cipher = { 3309 .enc = { 3310 .vecs = aes_lrw_enc_tv_template, 3311 .count = AES_LRW_ENC_TEST_VECTORS 3312 }, 3313 .dec = { 3314 .vecs = aes_lrw_dec_tv_template, 3315 .count = AES_LRW_DEC_TEST_VECTORS 3316 } 3317 } 3318 } 3319 }, { 3320 .alg = "lrw(camellia)", 3321 .test = alg_test_skcipher, 3322 .suite = { 3323 .cipher = { 3324 .enc = { 3325 .vecs = camellia_lrw_enc_tv_template, 3326 .count = CAMELLIA_LRW_ENC_TEST_VECTORS 3327 }, 3328 .dec = { 3329 .vecs = camellia_lrw_dec_tv_template, 3330 .count = CAMELLIA_LRW_DEC_TEST_VECTORS 3331 } 3332 } 3333 } 3334 }, { 3335 .alg = "lrw(cast6)", 3336 .test = alg_test_skcipher, 3337 .suite = { 3338 .cipher = { 3339 .enc = { 3340 .vecs = cast6_lrw_enc_tv_template, 3341 .count = CAST6_LRW_ENC_TEST_VECTORS 3342 }, 3343 .dec = { 3344 .vecs = cast6_lrw_dec_tv_template, 3345 .count = CAST6_LRW_DEC_TEST_VECTORS 3346 } 3347 } 3348 } 3349 }, { 3350 .alg = "lrw(serpent)", 3351 .test = alg_test_skcipher, 3352 .suite = { 3353 .cipher = { 3354 .enc = { 3355 .vecs = serpent_lrw_enc_tv_template, 3356 .count = SERPENT_LRW_ENC_TEST_VECTORS 3357 }, 3358 .dec = { 3359 .vecs = serpent_lrw_dec_tv_template, 3360 .count = SERPENT_LRW_DEC_TEST_VECTORS 3361 } 3362 } 3363 } 3364 }, { 3365 .alg = "lrw(twofish)", 3366 .test = alg_test_skcipher, 3367 .suite = { 3368 .cipher = { 3369 .enc = { 3370 .vecs = tf_lrw_enc_tv_template, 3371 .count = TF_LRW_ENC_TEST_VECTORS 3372 }, 3373 .dec = { 3374 .vecs = tf_lrw_dec_tv_template, 3375 .count = TF_LRW_DEC_TEST_VECTORS 3376 } 3377 } 3378 } 3379 }, { 3380 .alg = "lz4", 3381 .test = alg_test_comp, 3382 .fips_allowed = 1, 3383 .suite = { 3384 .comp = { 3385 .comp = { 3386 .vecs = lz4_comp_tv_template, 3387 .count = 
LZ4_COMP_TEST_VECTORS 3388 }, 3389 .decomp = { 3390 .vecs = lz4_decomp_tv_template, 3391 .count = LZ4_DECOMP_TEST_VECTORS 3392 } 3393 } 3394 } 3395 }, { 3396 .alg = "lz4hc", 3397 .test = alg_test_comp, 3398 .fips_allowed = 1, 3399 .suite = { 3400 .comp = { 3401 .comp = { 3402 .vecs = lz4hc_comp_tv_template, 3403 .count = LZ4HC_COMP_TEST_VECTORS 3404 }, 3405 .decomp = { 3406 .vecs = lz4hc_decomp_tv_template, 3407 .count = LZ4HC_DECOMP_TEST_VECTORS 3408 } 3409 } 3410 } 3411 }, { 3412 .alg = "lzo", 3413 .test = alg_test_comp, 3414 .fips_allowed = 1, 3415 .suite = { 3416 .comp = { 3417 .comp = { 3418 .vecs = lzo_comp_tv_template, 3419 .count = LZO_COMP_TEST_VECTORS 3420 }, 3421 .decomp = { 3422 .vecs = lzo_decomp_tv_template, 3423 .count = LZO_DECOMP_TEST_VECTORS 3424 } 3425 } 3426 } 3427 }, { 3428 .alg = "md4", 3429 .test = alg_test_hash, 3430 .suite = { 3431 .hash = { 3432 .vecs = md4_tv_template, 3433 .count = MD4_TEST_VECTORS 3434 } 3435 } 3436 }, { 3437 .alg = "md5", 3438 .test = alg_test_hash, 3439 .suite = { 3440 .hash = { 3441 .vecs = md5_tv_template, 3442 .count = MD5_TEST_VECTORS 3443 } 3444 } 3445 }, { 3446 .alg = "michael_mic", 3447 .test = alg_test_hash, 3448 .suite = { 3449 .hash = { 3450 .vecs = michael_mic_tv_template, 3451 .count = MICHAEL_MIC_TEST_VECTORS 3452 } 3453 } 3454 }, { 3455 .alg = "ofb(aes)", 3456 .test = alg_test_skcipher, 3457 .fips_allowed = 1, 3458 .suite = { 3459 .cipher = { 3460 .enc = { 3461 .vecs = aes_ofb_enc_tv_template, 3462 .count = AES_OFB_ENC_TEST_VECTORS 3463 }, 3464 .dec = { 3465 .vecs = aes_ofb_dec_tv_template, 3466 .count = AES_OFB_DEC_TEST_VECTORS 3467 } 3468 } 3469 } 3470 }, { 3471 .alg = "pcbc(fcrypt)", 3472 .test = alg_test_skcipher, 3473 .suite = { 3474 .cipher = { 3475 .enc = { 3476 .vecs = fcrypt_pcbc_enc_tv_template, 3477 .count = FCRYPT_ENC_TEST_VECTORS 3478 }, 3479 .dec = { 3480 .vecs = fcrypt_pcbc_dec_tv_template, 3481 .count = FCRYPT_DEC_TEST_VECTORS 3482 } 3483 } 3484 } 3485 }, { 3486 .alg = "poly1305", 3487 .test = alg_test_hash, 3488 .suite = { 3489 .hash = { 3490 .vecs = poly1305_tv_template, 3491 .count = POLY1305_TEST_VECTORS 3492 } 3493 } 3494 }, { 3495 .alg = "rfc3686(ctr(aes))", 3496 .test = alg_test_skcipher, 3497 .fips_allowed = 1, 3498 .suite = { 3499 .cipher = { 3500 .enc = { 3501 .vecs = aes_ctr_rfc3686_enc_tv_template, 3502 .count = AES_CTR_3686_ENC_TEST_VECTORS 3503 }, 3504 .dec = { 3505 .vecs = aes_ctr_rfc3686_dec_tv_template, 3506 .count = AES_CTR_3686_DEC_TEST_VECTORS 3507 } 3508 } 3509 } 3510 }, { 3511 .alg = "rfc4106(gcm(aes))", 3512 .test = alg_test_aead, 3513 .fips_allowed = 1, 3514 .suite = { 3515 .aead = { 3516 .enc = { 3517 .vecs = aes_gcm_rfc4106_enc_tv_template, 3518 .count = AES_GCM_4106_ENC_TEST_VECTORS 3519 }, 3520 .dec = { 3521 .vecs = aes_gcm_rfc4106_dec_tv_template, 3522 .count = AES_GCM_4106_DEC_TEST_VECTORS 3523 } 3524 } 3525 } 3526 }, { 3527 .alg = "rfc4309(ccm(aes))", 3528 .test = alg_test_aead, 3529 .fips_allowed = 1, 3530 .suite = { 3531 .aead = { 3532 .enc = { 3533 .vecs = aes_ccm_rfc4309_enc_tv_template, 3534 .count = AES_CCM_4309_ENC_TEST_VECTORS 3535 }, 3536 .dec = { 3537 .vecs = aes_ccm_rfc4309_dec_tv_template, 3538 .count = AES_CCM_4309_DEC_TEST_VECTORS 3539 } 3540 } 3541 } 3542 }, { 3543 .alg = "rfc4543(gcm(aes))", 3544 .test = alg_test_aead, 3545 .suite = { 3546 .aead = { 3547 .enc = { 3548 .vecs = aes_gcm_rfc4543_enc_tv_template, 3549 .count = AES_GCM_4543_ENC_TEST_VECTORS 3550 }, 3551 .dec = { 3552 .vecs = aes_gcm_rfc4543_dec_tv_template, 3553 .count = AES_GCM_4543_DEC_TEST_VECTORS 3554 
}, 3555 } 3556 } 3557 }, { 3558 .alg = "rfc7539(chacha20,poly1305)", 3559 .test = alg_test_aead, 3560 .suite = { 3561 .aead = { 3562 .enc = { 3563 .vecs = rfc7539_enc_tv_template, 3564 .count = RFC7539_ENC_TEST_VECTORS 3565 }, 3566 .dec = { 3567 .vecs = rfc7539_dec_tv_template, 3568 .count = RFC7539_DEC_TEST_VECTORS 3569 }, 3570 } 3571 } 3572 }, { 3573 .alg = "rfc7539esp(chacha20,poly1305)", 3574 .test = alg_test_aead, 3575 .suite = { 3576 .aead = { 3577 .enc = { 3578 .vecs = rfc7539esp_enc_tv_template, 3579 .count = RFC7539ESP_ENC_TEST_VECTORS 3580 }, 3581 .dec = { 3582 .vecs = rfc7539esp_dec_tv_template, 3583 .count = RFC7539ESP_DEC_TEST_VECTORS 3584 }, 3585 } 3586 } 3587 }, { 3588 .alg = "rmd128", 3589 .test = alg_test_hash, 3590 .suite = { 3591 .hash = { 3592 .vecs = rmd128_tv_template, 3593 .count = RMD128_TEST_VECTORS 3594 } 3595 } 3596 }, { 3597 .alg = "rmd160", 3598 .test = alg_test_hash, 3599 .suite = { 3600 .hash = { 3601 .vecs = rmd160_tv_template, 3602 .count = RMD160_TEST_VECTORS 3603 } 3604 } 3605 }, { 3606 .alg = "rmd256", 3607 .test = alg_test_hash, 3608 .suite = { 3609 .hash = { 3610 .vecs = rmd256_tv_template, 3611 .count = RMD256_TEST_VECTORS 3612 } 3613 } 3614 }, { 3615 .alg = "rmd320", 3616 .test = alg_test_hash, 3617 .suite = { 3618 .hash = { 3619 .vecs = rmd320_tv_template, 3620 .count = RMD320_TEST_VECTORS 3621 } 3622 } 3623 }, { 3624 .alg = "rsa", 3625 .test = alg_test_akcipher, 3626 .fips_allowed = 1, 3627 .suite = { 3628 .akcipher = { 3629 .vecs = rsa_tv_template, 3630 .count = RSA_TEST_VECTORS 3631 } 3632 } 3633 }, { 3634 .alg = "salsa20", 3635 .test = alg_test_skcipher, 3636 .suite = { 3637 .cipher = { 3638 .enc = { 3639 .vecs = salsa20_stream_enc_tv_template, 3640 .count = SALSA20_STREAM_ENC_TEST_VECTORS 3641 } 3642 } 3643 } 3644 }, { 3645 .alg = "sha1", 3646 .test = alg_test_hash, 3647 .fips_allowed = 1, 3648 .suite = { 3649 .hash = { 3650 .vecs = sha1_tv_template, 3651 .count = SHA1_TEST_VECTORS 3652 } 3653 } 3654 }, { 3655 .alg = "sha224", 3656 .test = alg_test_hash, 3657 .fips_allowed = 1, 3658 .suite = { 3659 .hash = { 3660 .vecs = sha224_tv_template, 3661 .count = SHA224_TEST_VECTORS 3662 } 3663 } 3664 }, { 3665 .alg = "sha256", 3666 .test = alg_test_hash, 3667 .fips_allowed = 1, 3668 .suite = { 3669 .hash = { 3670 .vecs = sha256_tv_template, 3671 .count = SHA256_TEST_VECTORS 3672 } 3673 } 3674 }, { 3675 .alg = "sha384", 3676 .test = alg_test_hash, 3677 .fips_allowed = 1, 3678 .suite = { 3679 .hash = { 3680 .vecs = sha384_tv_template, 3681 .count = SHA384_TEST_VECTORS 3682 } 3683 } 3684 }, { 3685 .alg = "sha512", 3686 .test = alg_test_hash, 3687 .fips_allowed = 1, 3688 .suite = { 3689 .hash = { 3690 .vecs = sha512_tv_template, 3691 .count = SHA512_TEST_VECTORS 3692 } 3693 } 3694 }, { 3695 .alg = "tgr128", 3696 .test = alg_test_hash, 3697 .suite = { 3698 .hash = { 3699 .vecs = tgr128_tv_template, 3700 .count = TGR128_TEST_VECTORS 3701 } 3702 } 3703 }, { 3704 .alg = "tgr160", 3705 .test = alg_test_hash, 3706 .suite = { 3707 .hash = { 3708 .vecs = tgr160_tv_template, 3709 .count = TGR160_TEST_VECTORS 3710 } 3711 } 3712 }, { 3713 .alg = "tgr192", 3714 .test = alg_test_hash, 3715 .suite = { 3716 .hash = { 3717 .vecs = tgr192_tv_template, 3718 .count = TGR192_TEST_VECTORS 3719 } 3720 } 3721 }, { 3722 .alg = "vmac(aes)", 3723 .test = alg_test_hash, 3724 .suite = { 3725 .hash = { 3726 .vecs = aes_vmac128_tv_template, 3727 .count = VMAC_AES_TEST_VECTORS 3728 } 3729 } 3730 }, { 3731 .alg = "wp256", 3732 .test = alg_test_hash, 3733 .suite = { 3734 .hash = { 3735 
.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
			}
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_xts_enc_tv_template,
					.count = AES_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_xts_dec_tv_template,
					.count = AES_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_xts_enc_tv_template,
					.count = CAMELLIA_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_xts_dec_tv_template,
					.count = CAMELLIA_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_xts_enc_tv_template,
					.count = CAST6_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_xts_dec_tv_template,
					.count = CAST6_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_xts_enc_tv_template,
					.count = SERPENT_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_xts_dec_tv_template,
					.count = SERPENT_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_xts_enc_tv_template,
					.count = TF_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_xts_dec_tv_template,
					.count = TF_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "zlib",
		.test = alg_test_pcomp,
		.fips_allowed = 1,
		.suite = {
			.pcomp = {
				.comp = {
					.vecs = zlib_comp_tv_template,
					.count = ZLIB_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = zlib_decomp_tv_template,
					.count = ZLIB_DECOMP_TEST_VECTORS
				}
			}
		}
	}
};

static bool alg_test_descs_checked;

static void alg_test_descs_check_order(void)
{
	int i;

	/* only check once */
	if (alg_test_descs_checked)
		return;

	alg_test_descs_checked = true;

	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
		int diff = strcmp(alg_test_descs[i - 1].alg,
				  alg_test_descs[i].alg);

		if (WARN_ON(diff > 0)) {
			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
				alg_test_descs[i - 1].alg,
				alg_test_descs[i].alg);
		}

		if (WARN_ON(diff == 0)) {
			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
				alg_test_descs[i].alg);
		}
	}
}

/* Binary search for @alg in the sorted alg_test_descs[] table. */
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/* Single block ciphers are looked up via their ecb() wrapping. */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);
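
/*
 * Usage sketch (not part of the original file): alg_test() is the entry
 * point exported above.  A caller such as the crypto manager passes the
 * driver name of a newly registered implementation together with the
 * generic algorithm name, and a non-zero return means the self-tests
 * failed.  The guard macro, the init function and the "cbc(aes-generic)"
 * driver name below are illustrative assumptions for this sketch, not
 * something this file defines.
 */
#ifdef TESTMGR_USAGE_EXAMPLE
static int __init testmgr_usage_example(void)
{
	/* Run the registered self-tests for a generic cbc(aes) driver. */
	int err = alg_test("cbc(aes-generic)", "cbc(aes)", 0, 0);

	if (err)
		pr_err("alg: cbc(aes) self-tests failed: %d\n", err);

	return err;
}
late_initcall(testmgr_usage_example);
#endif /* TESTMGR_USAGE_EXAMPLE */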