/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
#include <crypto/drbg.h>
#include <crypto/akcipher.h>

#include "internal.h"

#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else

#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	struct akcipher_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
	} suite;
};

static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

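	/*
	 * A backlogged request is first completed with -EINPROGRESS to
	 * signal that it has been accepted for processing; the final
	 * completion with the real status follows later, so only record
	 * the result and wake the waiter on that final call.
	 */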
	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_page((unsigned long)buf[i]);

	return -ENOMEM;
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)buf[i]);
}

static int wait_async_op(struct tcrypt_result *tr, int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		wait_for_completion(&tr->completion);
		reinit_completion(&tr->completion);
		ret = tr->err;
	}
	return ret;
}

static int ahash_partial_update(struct ahash_request **preq,
	struct crypto_ahash *tfm, struct hash_testvec *template,
	void *hash_buff, int k, int temp, struct scatterlist *sg,
	const char *algo, char *result, struct tcrypt_result *tresult)
{
	char *state;
	struct ahash_request *req;
	int statesize, ret = -EINVAL;

	req = *preq;
	statesize = crypto_ahash_statesize(
			crypto_ahash_reqtfm(req));
	state = kmalloc(statesize, GFP_KERNEL);
	if (!state) {
		pr_err("alg: hash: Failed to alloc state for %s\n", algo);
		goto out_nostate;
	}
	ret = crypto_ahash_export(req, state);
	if (ret) {
		pr_err("alg: hash: Failed to export() for %s\n", algo);
		goto out;
	}
	ahash_request_free(req);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req,
		CRYPTO_TFM_REQ_MAY_BACKLOG,
		tcrypt_complete, tresult);

	memcpy(hash_buff, template->plaintext + temp,
		template->tap[k]);
	sg_init_one(&sg[0], hash_buff, template->tap[k]);
	ahash_request_set_crypt(req, sg, result, template->tap[k]);
	ret = crypto_ahash_import(req, state);
	if (ret) {
		pr_err("alg: hash: Failed to import() for %s\n", algo);
		goto out;
	}
	ret = wait_async_op(tresult, crypto_ahash_update(req));
	if (ret)
		goto out;
	*preq = req;
	ret = 0;
	goto out_noreq;
out:
	ahash_request_free(req);
out_noreq:
	kfree(state);
out_nostate:
	return ret;
}

static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		       unsigned int tcount, bool use_digest,
		       const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char *result;
	char *key;
	struct ahash_request *req;
	struct tcrypt_result tresult;
	void *hash_buff;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
	if (!result)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_nobuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
			goto out;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		hash_buff = xbuf[0];
		hash_buff += align_offset;

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", j, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		if (use_digest) {
			ret = wait_async_op(&tresult, crypto_ahash_digest(req));
			if (ret) {
				pr_err("alg: hash: digest failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		} else {
			ret = wait_async_op(&tresult, crypto_ahash_init(req));
			if (ret) {
				pr_err("alg: hash: init failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = wait_async_op(&tresult, crypto_ahash_update(req));
			if (ret) {
				pr_err("alg: hash: update failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
			ret = wait_async_op(&tresult, crypto_ahash_final(req));
			if (ret) {
				pr_err("alg: hash: final failed on test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		temp = 0;
		sg_init_table(sg, template[i].np);
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].plaintext + temp,
					  template[i].tap[k]),
				   template[i].tap[k]);
			temp += template[i].tap[k];
		}

		if (template[i].ksize) {
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			crypto_ahash_clear_flags(tfm, ~0);
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);

			if (ret) {
				printk(KERN_ERR "alg: hash: setkey "
				       "failed on chunking test %d "
				       "for %s: ret=%d\n", j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_ahash_digest(req);
		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&tresult.completion);
			reinit_completion(&tresult.completion);
			ret = tresult.err;
			if (!ret)
				break;
			/* fall through */
		default:
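			/* any other status from crypto_ahash_digest() is a hard failure */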
			printk(KERN_ERR "alg: hash: digest failed "
			       "on chunking test %d for %s: "
			       "ret=%d\n", j, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Chunking test %d "
			       "failed for %s\n", j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* partial update exercise */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (template[i].np < 2)
			continue;

		j++;
		memset(result, 0, MAX_DIGEST_SIZE);

		ret = -EINVAL;
		hash_buff = xbuf[0];
		memcpy(hash_buff, template[i].plaintext,
			template[i].tap[0]);
		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			if (template[i].ksize > MAX_KEYLEN) {
				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
				       j, algo, template[i].ksize, MAX_KEYLEN);
				ret = -EINVAL;
				goto out;
			}
			memcpy(key, template[i].key, template[i].ksize);
			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
			if (ret) {
				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
		ret = wait_async_op(&tresult, crypto_ahash_init(req));
		if (ret) {
			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}
		ret = wait_async_op(&tresult, crypto_ahash_update(req));
		if (ret) {
			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}

		temp = template[i].tap[0];
		for (k = 1; k < template[i].np; k++) {
			ret = ahash_partial_update(&req, tfm, &template[i],
				hash_buff, k, temp, &sg[0], algo, result,
				&tresult);
			if (ret) {
				pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out_noreq;
			}
			temp += template[i].tap[k];
		}
		ret = wait_async_op(&tresult, crypto_ahash_final(req));
		if (ret) {
			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}
		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			pr_err("alg: hash: Partial Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	kfree(key);
	kfree(result);
	return ret;
}

static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount, bool use_digest)
{
	unsigned int alignmask;
	int ret;

	ret = __test_hash(tfm, template, tcount, use_digest, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_hash(tfm, template, tcount, use_digest, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set.
*/ 543 ret = __test_hash(tfm, template, tcount, use_digest, 544 alignmask + 1); 545 if (ret) 546 return ret; 547 } 548 549 return 0; 550 } 551 552 static int __test_aead(struct crypto_aead *tfm, int enc, 553 struct aead_testvec *template, unsigned int tcount, 554 const bool diff_dst, const int align_offset) 555 { 556 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 557 unsigned int i, j, k, n, temp; 558 int ret = -ENOMEM; 559 char *q; 560 char *key; 561 struct aead_request *req; 562 struct scatterlist *sg; 563 struct scatterlist *sgout; 564 const char *e, *d; 565 struct tcrypt_result result; 566 unsigned int authsize, iv_len; 567 void *input; 568 void *output; 569 void *assoc; 570 char *iv; 571 char *xbuf[XBUFSIZE]; 572 char *xoutbuf[XBUFSIZE]; 573 char *axbuf[XBUFSIZE]; 574 575 iv = kzalloc(MAX_IVLEN, GFP_KERNEL); 576 if (!iv) 577 return ret; 578 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 579 if (!key) 580 goto out_noxbuf; 581 if (testmgr_alloc_buf(xbuf)) 582 goto out_noxbuf; 583 if (testmgr_alloc_buf(axbuf)) 584 goto out_noaxbuf; 585 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 586 goto out_nooutbuf; 587 588 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ 589 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL); 590 if (!sg) 591 goto out_nosg; 592 sgout = &sg[16]; 593 594 if (diff_dst) 595 d = "-ddst"; 596 else 597 d = ""; 598 599 if (enc == ENCRYPT) 600 e = "encryption"; 601 else 602 e = "decryption"; 603 604 init_completion(&result.completion); 605 606 req = aead_request_alloc(tfm, GFP_KERNEL); 607 if (!req) { 608 pr_err("alg: aead%s: Failed to allocate request for %s\n", 609 d, algo); 610 goto out; 611 } 612 613 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 614 tcrypt_complete, &result); 615 616 iv_len = crypto_aead_ivsize(tfm); 617 618 for (i = 0, j = 0; i < tcount; i++) { 619 if (template[i].np) 620 continue; 621 622 j++; 623 624 /* some templates have no input data but they will 625 * touch input 626 */ 627 input = xbuf[0]; 628 input += align_offset; 629 assoc = axbuf[0]; 630 631 ret = -EINVAL; 632 if (WARN_ON(align_offset + template[i].ilen > 633 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 634 goto out; 635 636 memcpy(input, template[i].input, template[i].ilen); 637 memcpy(assoc, template[i].assoc, template[i].alen); 638 if (template[i].iv) 639 memcpy(iv, template[i].iv, iv_len); 640 else 641 memset(iv, 0, iv_len); 642 643 crypto_aead_clear_flags(tfm, ~0); 644 if (template[i].wk) 645 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 646 647 if (template[i].klen > MAX_KEYLEN) { 648 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 649 d, j, algo, template[i].klen, 650 MAX_KEYLEN); 651 ret = -EINVAL; 652 goto out; 653 } 654 memcpy(key, template[i].key, template[i].klen); 655 656 ret = crypto_aead_setkey(tfm, key, template[i].klen); 657 if (!ret == template[i].fail) { 658 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", 659 d, j, algo, crypto_aead_get_flags(tfm)); 660 goto out; 661 } else if (ret) 662 continue; 663 664 authsize = abs(template[i].rlen - template[i].ilen); 665 ret = crypto_aead_setauthsize(tfm, authsize); 666 if (ret) { 667 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", 668 d, authsize, j, algo); 669 goto out; 670 } 671 672 k = !!template[i].alen; 673 sg_init_table(sg, k + 1); 674 sg_set_buf(&sg[0], assoc, template[i].alen); 675 sg_set_buf(&sg[k], input, 676 template[i].ilen + (enc ? 
authsize : 0)); 677 output = input; 678 679 if (diff_dst) { 680 sg_init_table(sgout, k + 1); 681 sg_set_buf(&sgout[0], assoc, template[i].alen); 682 683 output = xoutbuf[0]; 684 output += align_offset; 685 sg_set_buf(&sgout[k], output, 686 template[i].rlen + (enc ? 0 : authsize)); 687 } 688 689 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 690 template[i].ilen, iv); 691 692 aead_request_set_ad(req, template[i].alen); 693 694 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 695 696 switch (ret) { 697 case 0: 698 if (template[i].novrfy) { 699 /* verification was supposed to fail */ 700 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 701 d, e, j, algo); 702 /* so really, we got a bad message */ 703 ret = -EBADMSG; 704 goto out; 705 } 706 break; 707 case -EINPROGRESS: 708 case -EBUSY: 709 wait_for_completion(&result.completion); 710 reinit_completion(&result.completion); 711 ret = result.err; 712 if (!ret) 713 break; 714 case -EBADMSG: 715 if (template[i].novrfy) 716 /* verification failure was expected */ 717 continue; 718 /* fall through */ 719 default: 720 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", 721 d, e, j, algo, -ret); 722 goto out; 723 } 724 725 q = output; 726 if (memcmp(q, template[i].result, template[i].rlen)) { 727 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 728 d, j, e, algo); 729 hexdump(q, template[i].rlen); 730 ret = -EINVAL; 731 goto out; 732 } 733 } 734 735 for (i = 0, j = 0; i < tcount; i++) { 736 /* alignment tests are only done with continuous buffers */ 737 if (align_offset != 0) 738 break; 739 740 if (!template[i].np) 741 continue; 742 743 j++; 744 745 if (template[i].iv) 746 memcpy(iv, template[i].iv, iv_len); 747 else 748 memset(iv, 0, MAX_IVLEN); 749 750 crypto_aead_clear_flags(tfm, ~0); 751 if (template[i].wk) 752 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 753 if (template[i].klen > MAX_KEYLEN) { 754 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 755 d, j, algo, template[i].klen, MAX_KEYLEN); 756 ret = -EINVAL; 757 goto out; 758 } 759 memcpy(key, template[i].key, template[i].klen); 760 761 ret = crypto_aead_setkey(tfm, key, template[i].klen); 762 if (!ret == template[i].fail) { 763 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 764 d, j, algo, crypto_aead_get_flags(tfm)); 765 goto out; 766 } else if (ret) 767 continue; 768 769 authsize = abs(template[i].rlen - template[i].ilen); 770 771 ret = -EINVAL; 772 sg_init_table(sg, template[i].anp + template[i].np); 773 if (diff_dst) 774 sg_init_table(sgout, template[i].anp + template[i].np); 775 776 ret = -EINVAL; 777 for (k = 0, temp = 0; k < template[i].anp; k++) { 778 if (WARN_ON(offset_in_page(IDX[k]) + 779 template[i].atap[k] > PAGE_SIZE)) 780 goto out; 781 sg_set_buf(&sg[k], 782 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 783 offset_in_page(IDX[k]), 784 template[i].assoc + temp, 785 template[i].atap[k]), 786 template[i].atap[k]); 787 if (diff_dst) 788 sg_set_buf(&sgout[k], 789 axbuf[IDX[k] >> PAGE_SHIFT] + 790 offset_in_page(IDX[k]), 791 template[i].atap[k]); 792 temp += template[i].atap[k]; 793 } 794 795 for (k = 0, temp = 0; k < template[i].np; k++) { 796 if (WARN_ON(offset_in_page(IDX[k]) + 797 template[i].tap[k] > PAGE_SIZE)) 798 goto out; 799 800 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 801 memcpy(q, template[i].input + temp, template[i].tap[k]); 802 sg_set_buf(&sg[template[i].anp + k], 803 q, template[i].tap[k]); 804 805 if (diff_dst) { 806 q = 
xoutbuf[IDX[k] >> PAGE_SHIFT] + 807 offset_in_page(IDX[k]); 808 809 memset(q, 0, template[i].tap[k]); 810 811 sg_set_buf(&sgout[template[i].anp + k], 812 q, template[i].tap[k]); 813 } 814 815 n = template[i].tap[k]; 816 if (k == template[i].np - 1 && enc) 817 n += authsize; 818 if (offset_in_page(q) + n < PAGE_SIZE) 819 q[n] = 0; 820 821 temp += template[i].tap[k]; 822 } 823 824 ret = crypto_aead_setauthsize(tfm, authsize); 825 if (ret) { 826 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 827 d, authsize, j, algo); 828 goto out; 829 } 830 831 if (enc) { 832 if (WARN_ON(sg[template[i].anp + k - 1].offset + 833 sg[template[i].anp + k - 1].length + 834 authsize > PAGE_SIZE)) { 835 ret = -EINVAL; 836 goto out; 837 } 838 839 if (diff_dst) 840 sgout[template[i].anp + k - 1].length += 841 authsize; 842 sg[template[i].anp + k - 1].length += authsize; 843 } 844 845 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 846 template[i].ilen, 847 iv); 848 849 aead_request_set_ad(req, template[i].alen); 850 851 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 852 853 switch (ret) { 854 case 0: 855 if (template[i].novrfy) { 856 /* verification was supposed to fail */ 857 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 858 d, e, j, algo); 859 /* so really, we got a bad message */ 860 ret = -EBADMSG; 861 goto out; 862 } 863 break; 864 case -EINPROGRESS: 865 case -EBUSY: 866 wait_for_completion(&result.completion); 867 reinit_completion(&result.completion); 868 ret = result.err; 869 if (!ret) 870 break; 871 case -EBADMSG: 872 if (template[i].novrfy) 873 /* verification failure was expected */ 874 continue; 875 /* fall through */ 876 default: 877 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", 878 d, e, j, algo, -ret); 879 goto out; 880 } 881 882 ret = -EINVAL; 883 for (k = 0, temp = 0; k < template[i].np; k++) { 884 if (diff_dst) 885 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 886 offset_in_page(IDX[k]); 887 else 888 q = xbuf[IDX[k] >> PAGE_SHIFT] + 889 offset_in_page(IDX[k]); 890 891 n = template[i].tap[k]; 892 if (k == template[i].np - 1) 893 n += enc ? 
authsize : -authsize; 894 895 if (memcmp(q, template[i].result + temp, n)) { 896 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 897 d, j, e, k, algo); 898 hexdump(q, n); 899 goto out; 900 } 901 902 q += n; 903 if (k == template[i].np - 1 && !enc) { 904 if (!diff_dst && 905 memcmp(q, template[i].input + 906 temp + n, authsize)) 907 n = authsize; 908 else 909 n = 0; 910 } else { 911 for (n = 0; offset_in_page(q + n) && q[n]; n++) 912 ; 913 } 914 if (n) { 915 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 916 d, j, e, k, algo, n); 917 hexdump(q, n); 918 goto out; 919 } 920 921 temp += template[i].tap[k]; 922 } 923 } 924 925 ret = 0; 926 927 out: 928 aead_request_free(req); 929 kfree(sg); 930 out_nosg: 931 if (diff_dst) 932 testmgr_free_buf(xoutbuf); 933 out_nooutbuf: 934 testmgr_free_buf(axbuf); 935 out_noaxbuf: 936 testmgr_free_buf(xbuf); 937 out_noxbuf: 938 kfree(key); 939 kfree(iv); 940 return ret; 941 } 942 943 static int test_aead(struct crypto_aead *tfm, int enc, 944 struct aead_testvec *template, unsigned int tcount) 945 { 946 unsigned int alignmask; 947 int ret; 948 949 /* test 'dst == src' case */ 950 ret = __test_aead(tfm, enc, template, tcount, false, 0); 951 if (ret) 952 return ret; 953 954 /* test 'dst != src' case */ 955 ret = __test_aead(tfm, enc, template, tcount, true, 0); 956 if (ret) 957 return ret; 958 959 /* test unaligned buffers, check with one byte offset */ 960 ret = __test_aead(tfm, enc, template, tcount, true, 1); 961 if (ret) 962 return ret; 963 964 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 965 if (alignmask) { 966 /* Check if alignment mask for tfm is correctly set. */ 967 ret = __test_aead(tfm, enc, template, tcount, true, 968 alignmask + 1); 969 if (ret) 970 return ret; 971 } 972 973 return 0; 974 } 975 976 static int test_cipher(struct crypto_cipher *tfm, int enc, 977 struct cipher_testvec *template, unsigned int tcount) 978 { 979 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 980 unsigned int i, j, k; 981 char *q; 982 const char *e; 983 void *data; 984 char *xbuf[XBUFSIZE]; 985 int ret = -ENOMEM; 986 987 if (testmgr_alloc_buf(xbuf)) 988 goto out_nobuf; 989 990 if (enc == ENCRYPT) 991 e = "encryption"; 992 else 993 e = "decryption"; 994 995 j = 0; 996 for (i = 0; i < tcount; i++) { 997 if (template[i].np) 998 continue; 999 1000 j++; 1001 1002 ret = -EINVAL; 1003 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 1004 goto out; 1005 1006 data = xbuf[0]; 1007 memcpy(data, template[i].input, template[i].ilen); 1008 1009 crypto_cipher_clear_flags(tfm, ~0); 1010 if (template[i].wk) 1011 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 1012 1013 ret = crypto_cipher_setkey(tfm, template[i].key, 1014 template[i].klen); 1015 if (!ret == template[i].fail) { 1016 printk(KERN_ERR "alg: cipher: setkey failed " 1017 "on test %d for %s: flags=%x\n", j, 1018 algo, crypto_cipher_get_flags(tfm)); 1019 goto out; 1020 } else if (ret) 1021 continue; 1022 1023 for (k = 0; k < template[i].ilen; 1024 k += crypto_cipher_blocksize(tfm)) { 1025 if (enc) 1026 crypto_cipher_encrypt_one(tfm, data + k, 1027 data + k); 1028 else 1029 crypto_cipher_decrypt_one(tfm, data + k, 1030 data + k); 1031 } 1032 1033 q = data; 1034 if (memcmp(q, template[i].result, template[i].rlen)) { 1035 printk(KERN_ERR "alg: cipher: Test %d failed " 1036 "on %s for %s\n", j, e, algo); 1037 hexdump(q, template[i].rlen); 1038 ret = -EINVAL; 1039 goto out; 1040 } 1041 } 1042 1043 ret = 0; 1044 1045 out: 1046 
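	/* common cleanup: free the page-sized buffers from testmgr_alloc_buf() */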
testmgr_free_buf(xbuf); 1047 out_nobuf: 1048 return ret; 1049 } 1050 1051 static int __test_skcipher(struct crypto_skcipher *tfm, int enc, 1052 struct cipher_testvec *template, unsigned int tcount, 1053 const bool diff_dst, const int align_offset) 1054 { 1055 const char *algo = 1056 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)); 1057 unsigned int i, j, k, n, temp; 1058 char *q; 1059 struct skcipher_request *req; 1060 struct scatterlist sg[8]; 1061 struct scatterlist sgout[8]; 1062 const char *e, *d; 1063 struct tcrypt_result result; 1064 void *data; 1065 char iv[MAX_IVLEN]; 1066 char *xbuf[XBUFSIZE]; 1067 char *xoutbuf[XBUFSIZE]; 1068 int ret = -ENOMEM; 1069 unsigned int ivsize = crypto_skcipher_ivsize(tfm); 1070 1071 if (testmgr_alloc_buf(xbuf)) 1072 goto out_nobuf; 1073 1074 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 1075 goto out_nooutbuf; 1076 1077 if (diff_dst) 1078 d = "-ddst"; 1079 else 1080 d = ""; 1081 1082 if (enc == ENCRYPT) 1083 e = "encryption"; 1084 else 1085 e = "decryption"; 1086 1087 init_completion(&result.completion); 1088 1089 req = skcipher_request_alloc(tfm, GFP_KERNEL); 1090 if (!req) { 1091 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 1092 d, algo); 1093 goto out; 1094 } 1095 1096 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1097 tcrypt_complete, &result); 1098 1099 j = 0; 1100 for (i = 0; i < tcount; i++) { 1101 if (template[i].np && !template[i].also_non_np) 1102 continue; 1103 1104 if (template[i].iv) 1105 memcpy(iv, template[i].iv, ivsize); 1106 else 1107 memset(iv, 0, MAX_IVLEN); 1108 1109 j++; 1110 ret = -EINVAL; 1111 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE)) 1112 goto out; 1113 1114 data = xbuf[0]; 1115 data += align_offset; 1116 memcpy(data, template[i].input, template[i].ilen); 1117 1118 crypto_skcipher_clear_flags(tfm, ~0); 1119 if (template[i].wk) 1120 crypto_skcipher_set_flags(tfm, 1121 CRYPTO_TFM_REQ_WEAK_KEY); 1122 1123 ret = crypto_skcipher_setkey(tfm, template[i].key, 1124 template[i].klen); 1125 if (!ret == template[i].fail) { 1126 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 1127 d, j, algo, crypto_skcipher_get_flags(tfm)); 1128 goto out; 1129 } else if (ret) 1130 continue; 1131 1132 sg_init_one(&sg[0], data, template[i].ilen); 1133 if (diff_dst) { 1134 data = xoutbuf[0]; 1135 data += align_offset; 1136 sg_init_one(&sgout[0], data, template[i].ilen); 1137 } 1138 1139 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1140 template[i].ilen, iv); 1141 ret = enc ? 
crypto_skcipher_encrypt(req) : 1142 crypto_skcipher_decrypt(req); 1143 1144 switch (ret) { 1145 case 0: 1146 break; 1147 case -EINPROGRESS: 1148 case -EBUSY: 1149 wait_for_completion(&result.completion); 1150 reinit_completion(&result.completion); 1151 ret = result.err; 1152 if (!ret) 1153 break; 1154 /* fall through */ 1155 default: 1156 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 1157 d, e, j, algo, -ret); 1158 goto out; 1159 } 1160 1161 q = data; 1162 if (memcmp(q, template[i].result, template[i].rlen)) { 1163 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n", 1164 d, j, e, algo); 1165 hexdump(q, template[i].rlen); 1166 ret = -EINVAL; 1167 goto out; 1168 } 1169 1170 if (template[i].iv_out && 1171 memcmp(iv, template[i].iv_out, 1172 crypto_skcipher_ivsize(tfm))) { 1173 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n", 1174 d, j, e, algo); 1175 hexdump(iv, crypto_skcipher_ivsize(tfm)); 1176 ret = -EINVAL; 1177 goto out; 1178 } 1179 } 1180 1181 j = 0; 1182 for (i = 0; i < tcount; i++) { 1183 /* alignment tests are only done with continuous buffers */ 1184 if (align_offset != 0) 1185 break; 1186 1187 if (!template[i].np) 1188 continue; 1189 1190 if (template[i].iv) 1191 memcpy(iv, template[i].iv, ivsize); 1192 else 1193 memset(iv, 0, MAX_IVLEN); 1194 1195 j++; 1196 crypto_skcipher_clear_flags(tfm, ~0); 1197 if (template[i].wk) 1198 crypto_skcipher_set_flags(tfm, 1199 CRYPTO_TFM_REQ_WEAK_KEY); 1200 1201 ret = crypto_skcipher_setkey(tfm, template[i].key, 1202 template[i].klen); 1203 if (!ret == template[i].fail) { 1204 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 1205 d, j, algo, crypto_skcipher_get_flags(tfm)); 1206 goto out; 1207 } else if (ret) 1208 continue; 1209 1210 temp = 0; 1211 ret = -EINVAL; 1212 sg_init_table(sg, template[i].np); 1213 if (diff_dst) 1214 sg_init_table(sgout, template[i].np); 1215 for (k = 0; k < template[i].np; k++) { 1216 if (WARN_ON(offset_in_page(IDX[k]) + 1217 template[i].tap[k] > PAGE_SIZE)) 1218 goto out; 1219 1220 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 1221 1222 memcpy(q, template[i].input + temp, template[i].tap[k]); 1223 1224 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE) 1225 q[template[i].tap[k]] = 0; 1226 1227 sg_set_buf(&sg[k], q, template[i].tap[k]); 1228 if (diff_dst) { 1229 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1230 offset_in_page(IDX[k]); 1231 1232 sg_set_buf(&sgout[k], q, template[i].tap[k]); 1233 1234 memset(q, 0, template[i].tap[k]); 1235 if (offset_in_page(q) + 1236 template[i].tap[k] < PAGE_SIZE) 1237 q[template[i].tap[k]] = 0; 1238 } 1239 1240 temp += template[i].tap[k]; 1241 } 1242 1243 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1244 template[i].ilen, iv); 1245 1246 ret = enc ? 
crypto_skcipher_encrypt(req) : 1247 crypto_skcipher_decrypt(req); 1248 1249 switch (ret) { 1250 case 0: 1251 break; 1252 case -EINPROGRESS: 1253 case -EBUSY: 1254 wait_for_completion(&result.completion); 1255 reinit_completion(&result.completion); 1256 ret = result.err; 1257 if (!ret) 1258 break; 1259 /* fall through */ 1260 default: 1261 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 1262 d, e, j, algo, -ret); 1263 goto out; 1264 } 1265 1266 temp = 0; 1267 ret = -EINVAL; 1268 for (k = 0; k < template[i].np; k++) { 1269 if (diff_dst) 1270 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1271 offset_in_page(IDX[k]); 1272 else 1273 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1274 offset_in_page(IDX[k]); 1275 1276 if (memcmp(q, template[i].result + temp, 1277 template[i].tap[k])) { 1278 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 1279 d, j, e, k, algo); 1280 hexdump(q, template[i].tap[k]); 1281 goto out; 1282 } 1283 1284 q += template[i].tap[k]; 1285 for (n = 0; offset_in_page(q + n) && q[n]; n++) 1286 ; 1287 if (n) { 1288 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 1289 d, j, e, k, algo, n); 1290 hexdump(q, n); 1291 goto out; 1292 } 1293 temp += template[i].tap[k]; 1294 } 1295 } 1296 1297 ret = 0; 1298 1299 out: 1300 skcipher_request_free(req); 1301 if (diff_dst) 1302 testmgr_free_buf(xoutbuf); 1303 out_nooutbuf: 1304 testmgr_free_buf(xbuf); 1305 out_nobuf: 1306 return ret; 1307 } 1308 1309 static int test_skcipher(struct crypto_skcipher *tfm, int enc, 1310 struct cipher_testvec *template, unsigned int tcount) 1311 { 1312 unsigned int alignmask; 1313 int ret; 1314 1315 /* test 'dst == src' case */ 1316 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 1317 if (ret) 1318 return ret; 1319 1320 /* test 'dst != src' case */ 1321 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 1322 if (ret) 1323 return ret; 1324 1325 /* test unaligned buffers, check with one byte offset */ 1326 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 1327 if (ret) 1328 return ret; 1329 1330 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1331 if (alignmask) { 1332 /* Check if alignment mask for tfm is correctly set. 
*/ 1333 ret = __test_skcipher(tfm, enc, template, tcount, true, 1334 alignmask + 1); 1335 if (ret) 1336 return ret; 1337 } 1338 1339 return 0; 1340 } 1341 1342 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, 1343 struct comp_testvec *dtemplate, int ctcount, int dtcount) 1344 { 1345 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 1346 unsigned int i; 1347 char result[COMP_BUF_SIZE]; 1348 int ret; 1349 1350 for (i = 0; i < ctcount; i++) { 1351 int ilen; 1352 unsigned int dlen = COMP_BUF_SIZE; 1353 1354 memset(result, 0, sizeof (result)); 1355 1356 ilen = ctemplate[i].inlen; 1357 ret = crypto_comp_compress(tfm, ctemplate[i].input, 1358 ilen, result, &dlen); 1359 if (ret) { 1360 printk(KERN_ERR "alg: comp: compression failed " 1361 "on test %d for %s: ret=%d\n", i + 1, algo, 1362 -ret); 1363 goto out; 1364 } 1365 1366 if (dlen != ctemplate[i].outlen) { 1367 printk(KERN_ERR "alg: comp: Compression test %d " 1368 "failed for %s: output len = %d\n", i + 1, algo, 1369 dlen); 1370 ret = -EINVAL; 1371 goto out; 1372 } 1373 1374 if (memcmp(result, ctemplate[i].output, dlen)) { 1375 printk(KERN_ERR "alg: comp: Compression test %d " 1376 "failed for %s\n", i + 1, algo); 1377 hexdump(result, dlen); 1378 ret = -EINVAL; 1379 goto out; 1380 } 1381 } 1382 1383 for (i = 0; i < dtcount; i++) { 1384 int ilen; 1385 unsigned int dlen = COMP_BUF_SIZE; 1386 1387 memset(result, 0, sizeof (result)); 1388 1389 ilen = dtemplate[i].inlen; 1390 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 1391 ilen, result, &dlen); 1392 if (ret) { 1393 printk(KERN_ERR "alg: comp: decompression failed " 1394 "on test %d for %s: ret=%d\n", i + 1, algo, 1395 -ret); 1396 goto out; 1397 } 1398 1399 if (dlen != dtemplate[i].outlen) { 1400 printk(KERN_ERR "alg: comp: Decompression test %d " 1401 "failed for %s: output len = %d\n", i + 1, algo, 1402 dlen); 1403 ret = -EINVAL; 1404 goto out; 1405 } 1406 1407 if (memcmp(result, dtemplate[i].output, dlen)) { 1408 printk(KERN_ERR "alg: comp: Decompression test %d " 1409 "failed for %s\n", i + 1, algo); 1410 hexdump(result, dlen); 1411 ret = -EINVAL; 1412 goto out; 1413 } 1414 } 1415 1416 ret = 0; 1417 1418 out: 1419 return ret; 1420 } 1421 1422 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template, 1423 unsigned int tcount) 1424 { 1425 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1426 int err = 0, i, j, seedsize; 1427 u8 *seed; 1428 char result[32]; 1429 1430 seedsize = crypto_rng_seedsize(tfm); 1431 1432 seed = kmalloc(seedsize, GFP_KERNEL); 1433 if (!seed) { 1434 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1435 "for %s\n", algo); 1436 return -ENOMEM; 1437 } 1438 1439 for (i = 0; i < tcount; i++) { 1440 memset(result, 0, 32); 1441 1442 memcpy(seed, template[i].v, template[i].vlen); 1443 memcpy(seed + template[i].vlen, template[i].key, 1444 template[i].klen); 1445 memcpy(seed + template[i].vlen + template[i].klen, 1446 template[i].dt, template[i].dtlen); 1447 1448 err = crypto_rng_reset(tfm, seed, seedsize); 1449 if (err) { 1450 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1451 "for %s\n", algo); 1452 goto out; 1453 } 1454 1455 for (j = 0; j < template[i].loops; j++) { 1456 err = crypto_rng_get_bytes(tfm, result, 1457 template[i].rlen); 1458 if (err < 0) { 1459 printk(KERN_ERR "alg: cprng: Failed to obtain " 1460 "the correct amount of random data for " 1461 "%s (requested %d)\n", algo, 1462 template[i].rlen); 1463 goto out; 1464 } 1465 } 1466 1467 err = memcmp(result, 
template[i].result, 1468 template[i].rlen); 1469 if (err) { 1470 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1471 i, algo); 1472 hexdump(result, template[i].rlen); 1473 err = -EINVAL; 1474 goto out; 1475 } 1476 } 1477 1478 out: 1479 kfree(seed); 1480 return err; 1481 } 1482 1483 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1484 u32 type, u32 mask) 1485 { 1486 struct crypto_aead *tfm; 1487 int err = 0; 1488 1489 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask); 1490 if (IS_ERR(tfm)) { 1491 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1492 "%ld\n", driver, PTR_ERR(tfm)); 1493 return PTR_ERR(tfm); 1494 } 1495 1496 if (desc->suite.aead.enc.vecs) { 1497 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1498 desc->suite.aead.enc.count); 1499 if (err) 1500 goto out; 1501 } 1502 1503 if (!err && desc->suite.aead.dec.vecs) 1504 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1505 desc->suite.aead.dec.count); 1506 1507 out: 1508 crypto_free_aead(tfm); 1509 return err; 1510 } 1511 1512 static int alg_test_cipher(const struct alg_test_desc *desc, 1513 const char *driver, u32 type, u32 mask) 1514 { 1515 struct crypto_cipher *tfm; 1516 int err = 0; 1517 1518 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1519 if (IS_ERR(tfm)) { 1520 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1521 "%s: %ld\n", driver, PTR_ERR(tfm)); 1522 return PTR_ERR(tfm); 1523 } 1524 1525 if (desc->suite.cipher.enc.vecs) { 1526 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1527 desc->suite.cipher.enc.count); 1528 if (err) 1529 goto out; 1530 } 1531 1532 if (desc->suite.cipher.dec.vecs) 1533 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1534 desc->suite.cipher.dec.count); 1535 1536 out: 1537 crypto_free_cipher(tfm); 1538 return err; 1539 } 1540 1541 static int alg_test_skcipher(const struct alg_test_desc *desc, 1542 const char *driver, u32 type, u32 mask) 1543 { 1544 struct crypto_skcipher *tfm; 1545 int err = 0; 1546 1547 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1548 if (IS_ERR(tfm)) { 1549 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1550 "%s: %ld\n", driver, PTR_ERR(tfm)); 1551 return PTR_ERR(tfm); 1552 } 1553 1554 if (desc->suite.cipher.enc.vecs) { 1555 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1556 desc->suite.cipher.enc.count); 1557 if (err) 1558 goto out; 1559 } 1560 1561 if (desc->suite.cipher.dec.vecs) 1562 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1563 desc->suite.cipher.dec.count); 1564 1565 out: 1566 crypto_free_skcipher(tfm); 1567 return err; 1568 } 1569 1570 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1571 u32 type, u32 mask) 1572 { 1573 struct crypto_comp *tfm; 1574 int err; 1575 1576 tfm = crypto_alloc_comp(driver, type, mask); 1577 if (IS_ERR(tfm)) { 1578 printk(KERN_ERR "alg: comp: Failed to load transform for %s: " 1579 "%ld\n", driver, PTR_ERR(tfm)); 1580 return PTR_ERR(tfm); 1581 } 1582 1583 err = test_comp(tfm, desc->suite.comp.comp.vecs, 1584 desc->suite.comp.decomp.vecs, 1585 desc->suite.comp.comp.count, 1586 desc->suite.comp.decomp.count); 1587 1588 crypto_free_comp(tfm); 1589 return err; 1590 } 1591 1592 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1593 u32 type, u32 mask) 1594 { 1595 struct crypto_ahash *tfm; 1596 int err; 1597 1598 tfm = crypto_alloc_ahash(driver, type | 
CRYPTO_ALG_INTERNAL, mask); 1599 if (IS_ERR(tfm)) { 1600 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1601 "%ld\n", driver, PTR_ERR(tfm)); 1602 return PTR_ERR(tfm); 1603 } 1604 1605 err = test_hash(tfm, desc->suite.hash.vecs, 1606 desc->suite.hash.count, true); 1607 if (!err) 1608 err = test_hash(tfm, desc->suite.hash.vecs, 1609 desc->suite.hash.count, false); 1610 1611 crypto_free_ahash(tfm); 1612 return err; 1613 } 1614 1615 static int alg_test_crc32c(const struct alg_test_desc *desc, 1616 const char *driver, u32 type, u32 mask) 1617 { 1618 struct crypto_shash *tfm; 1619 u32 val; 1620 int err; 1621 1622 err = alg_test_hash(desc, driver, type, mask); 1623 if (err) 1624 goto out; 1625 1626 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask); 1627 if (IS_ERR(tfm)) { 1628 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1629 "%ld\n", driver, PTR_ERR(tfm)); 1630 err = PTR_ERR(tfm); 1631 goto out; 1632 } 1633 1634 do { 1635 SHASH_DESC_ON_STACK(shash, tfm); 1636 u32 *ctx = (u32 *)shash_desc_ctx(shash); 1637 1638 shash->tfm = tfm; 1639 shash->flags = 0; 1640 1641 *ctx = le32_to_cpu(420553207); 1642 err = crypto_shash_final(shash, (u8 *)&val); 1643 if (err) { 1644 printk(KERN_ERR "alg: crc32c: Operation failed for " 1645 "%s: %d\n", driver, err); 1646 break; 1647 } 1648 1649 if (val != ~420553207) { 1650 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1651 "%d\n", driver, val); 1652 err = -EINVAL; 1653 } 1654 } while (0); 1655 1656 crypto_free_shash(tfm); 1657 1658 out: 1659 return err; 1660 } 1661 1662 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1663 u32 type, u32 mask) 1664 { 1665 struct crypto_rng *rng; 1666 int err; 1667 1668 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask); 1669 if (IS_ERR(rng)) { 1670 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1671 "%ld\n", driver, PTR_ERR(rng)); 1672 return PTR_ERR(rng); 1673 } 1674 1675 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1676 1677 crypto_free_rng(rng); 1678 1679 return err; 1680 } 1681 1682 1683 static int drbg_cavs_test(struct drbg_testvec *test, int pr, 1684 const char *driver, u32 type, u32 mask) 1685 { 1686 int ret = -EAGAIN; 1687 struct crypto_rng *drng; 1688 struct drbg_test_data test_data; 1689 struct drbg_string addtl, pers, testentropy; 1690 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL); 1691 1692 if (!buf) 1693 return -ENOMEM; 1694 1695 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask); 1696 if (IS_ERR(drng)) { 1697 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for " 1698 "%s\n", driver); 1699 kzfree(buf); 1700 return -ENOMEM; 1701 } 1702 1703 test_data.testentropy = &testentropy; 1704 drbg_string_fill(&testentropy, test->entropy, test->entropylen); 1705 drbg_string_fill(&pers, test->pers, test->perslen); 1706 ret = crypto_drbg_reset_test(drng, &pers, &test_data); 1707 if (ret) { 1708 printk(KERN_ERR "alg: drbg: Failed to reset rng\n"); 1709 goto outbuf; 1710 } 1711 1712 drbg_string_fill(&addtl, test->addtla, test->addtllen); 1713 if (pr) { 1714 drbg_string_fill(&testentropy, test->entpra, test->entprlen); 1715 ret = crypto_drbg_get_bytes_addtl_test(drng, 1716 buf, test->expectedlen, &addtl, &test_data); 1717 } else { 1718 ret = crypto_drbg_get_bytes_addtl(drng, 1719 buf, test->expectedlen, &addtl); 1720 } 1721 if (ret < 0) { 1722 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1723 "driver %s\n", driver); 1724 goto outbuf; 1725 
} 1726 1727 drbg_string_fill(&addtl, test->addtlb, test->addtllen); 1728 if (pr) { 1729 drbg_string_fill(&testentropy, test->entprb, test->entprlen); 1730 ret = crypto_drbg_get_bytes_addtl_test(drng, 1731 buf, test->expectedlen, &addtl, &test_data); 1732 } else { 1733 ret = crypto_drbg_get_bytes_addtl(drng, 1734 buf, test->expectedlen, &addtl); 1735 } 1736 if (ret < 0) { 1737 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1738 "driver %s\n", driver); 1739 goto outbuf; 1740 } 1741 1742 ret = memcmp(test->expected, buf, test->expectedlen); 1743 1744 outbuf: 1745 crypto_free_rng(drng); 1746 kzfree(buf); 1747 return ret; 1748 } 1749 1750 1751 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver, 1752 u32 type, u32 mask) 1753 { 1754 int err = 0; 1755 int pr = 0; 1756 int i = 0; 1757 struct drbg_testvec *template = desc->suite.drbg.vecs; 1758 unsigned int tcount = desc->suite.drbg.count; 1759 1760 if (0 == memcmp(driver, "drbg_pr_", 8)) 1761 pr = 1; 1762 1763 for (i = 0; i < tcount; i++) { 1764 err = drbg_cavs_test(&template[i], pr, driver, type, mask); 1765 if (err) { 1766 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n", 1767 i, driver); 1768 err = -EINVAL; 1769 break; 1770 } 1771 } 1772 return err; 1773 1774 } 1775 1776 static int do_test_rsa(struct crypto_akcipher *tfm, 1777 struct akcipher_testvec *vecs) 1778 { 1779 struct akcipher_request *req; 1780 void *outbuf_enc = NULL; 1781 void *outbuf_dec = NULL; 1782 struct tcrypt_result result; 1783 unsigned int out_len_max, out_len = 0; 1784 int err = -ENOMEM; 1785 struct scatterlist src, dst, src_tab[2]; 1786 1787 req = akcipher_request_alloc(tfm, GFP_KERNEL); 1788 if (!req) 1789 return err; 1790 1791 init_completion(&result.completion); 1792 1793 if (vecs->public_key_vec) 1794 err = crypto_akcipher_set_pub_key(tfm, vecs->key, 1795 vecs->key_len); 1796 else 1797 err = crypto_akcipher_set_priv_key(tfm, vecs->key, 1798 vecs->key_len); 1799 if (err) 1800 goto free_req; 1801 1802 out_len_max = crypto_akcipher_maxsize(tfm); 1803 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL); 1804 if (!outbuf_enc) 1805 goto free_req; 1806 1807 sg_init_table(src_tab, 2); 1808 sg_set_buf(&src_tab[0], vecs->m, 8); 1809 sg_set_buf(&src_tab[1], vecs->m + 8, vecs->m_size - 8); 1810 sg_init_one(&dst, outbuf_enc, out_len_max); 1811 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size, 1812 out_len_max); 1813 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1814 tcrypt_complete, &result); 1815 1816 /* Run RSA encrypt - c = m^e mod n;*/ 1817 err = wait_async_op(&result, crypto_akcipher_encrypt(req)); 1818 if (err) { 1819 pr_err("alg: rsa: encrypt test failed. err %d\n", err); 1820 goto free_all; 1821 } 1822 if (req->dst_len != vecs->c_size) { 1823 pr_err("alg: rsa: encrypt test failed. Invalid output len\n"); 1824 err = -EINVAL; 1825 goto free_all; 1826 } 1827 /* verify that encrypted message is equal to expected */ 1828 if (memcmp(vecs->c, sg_virt(req->dst), vecs->c_size)) { 1829 pr_err("alg: rsa: encrypt test failed. 
Invalid output\n"); 1830 err = -EINVAL; 1831 goto free_all; 1832 } 1833 /* Don't invoke decrypt for vectors with public key */ 1834 if (vecs->public_key_vec) { 1835 err = 0; 1836 goto free_all; 1837 } 1838 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL); 1839 if (!outbuf_dec) { 1840 err = -ENOMEM; 1841 goto free_all; 1842 } 1843 sg_init_one(&src, vecs->c, vecs->c_size); 1844 sg_init_one(&dst, outbuf_dec, out_len_max); 1845 init_completion(&result.completion); 1846 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max); 1847 1848 /* Run RSA decrypt - m = c^d mod n;*/ 1849 err = wait_async_op(&result, crypto_akcipher_decrypt(req)); 1850 if (err) { 1851 pr_err("alg: rsa: decrypt test failed. err %d\n", err); 1852 goto free_all; 1853 } 1854 out_len = req->dst_len; 1855 if (out_len != vecs->m_size) { 1856 pr_err("alg: rsa: decrypt test failed. Invalid output len\n"); 1857 err = -EINVAL; 1858 goto free_all; 1859 } 1860 /* verify that decrypted message is equal to the original msg */ 1861 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) { 1862 pr_err("alg: rsa: decrypt test failed. Invalid output\n"); 1863 err = -EINVAL; 1864 } 1865 free_all: 1866 kfree(outbuf_dec); 1867 kfree(outbuf_enc); 1868 free_req: 1869 akcipher_request_free(req); 1870 return err; 1871 } 1872 1873 static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs, 1874 unsigned int tcount) 1875 { 1876 int ret, i; 1877 1878 for (i = 0; i < tcount; i++) { 1879 ret = do_test_rsa(tfm, vecs++); 1880 if (ret) { 1881 pr_err("alg: rsa: test failed on vector %d, err=%d\n", 1882 i + 1, ret); 1883 return ret; 1884 } 1885 } 1886 return 0; 1887 } 1888 1889 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg, 1890 struct akcipher_testvec *vecs, unsigned int tcount) 1891 { 1892 if (strncmp(alg, "rsa", 3) == 0) 1893 return test_rsa(tfm, vecs, tcount); 1894 1895 return 0; 1896 } 1897 1898 static int alg_test_akcipher(const struct alg_test_desc *desc, 1899 const char *driver, u32 type, u32 mask) 1900 { 1901 struct crypto_akcipher *tfm; 1902 int err = 0; 1903 1904 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask); 1905 if (IS_ERR(tfm)) { 1906 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n", 1907 driver, PTR_ERR(tfm)); 1908 return PTR_ERR(tfm); 1909 } 1910 if (desc->suite.akcipher.vecs) 1911 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs, 1912 desc->suite.akcipher.count); 1913 1914 crypto_free_akcipher(tfm); 1915 return err; 1916 } 1917 1918 static int alg_test_null(const struct alg_test_desc *desc, 1919 const char *driver, u32 type, u32 mask) 1920 { 1921 return 0; 1922 } 1923 1924 /* Please keep this list sorted by algorithm name. 
*/ 1925 static const struct alg_test_desc alg_test_descs[] = { 1926 { 1927 .alg = "__cbc-cast5-avx", 1928 .test = alg_test_null, 1929 }, { 1930 .alg = "__cbc-cast6-avx", 1931 .test = alg_test_null, 1932 }, { 1933 .alg = "__cbc-serpent-avx", 1934 .test = alg_test_null, 1935 }, { 1936 .alg = "__cbc-serpent-avx2", 1937 .test = alg_test_null, 1938 }, { 1939 .alg = "__cbc-serpent-sse2", 1940 .test = alg_test_null, 1941 }, { 1942 .alg = "__cbc-twofish-avx", 1943 .test = alg_test_null, 1944 }, { 1945 .alg = "__driver-cbc-aes-aesni", 1946 .test = alg_test_null, 1947 .fips_allowed = 1, 1948 }, { 1949 .alg = "__driver-cbc-camellia-aesni", 1950 .test = alg_test_null, 1951 }, { 1952 .alg = "__driver-cbc-camellia-aesni-avx2", 1953 .test = alg_test_null, 1954 }, { 1955 .alg = "__driver-cbc-cast5-avx", 1956 .test = alg_test_null, 1957 }, { 1958 .alg = "__driver-cbc-cast6-avx", 1959 .test = alg_test_null, 1960 }, { 1961 .alg = "__driver-cbc-serpent-avx", 1962 .test = alg_test_null, 1963 }, { 1964 .alg = "__driver-cbc-serpent-avx2", 1965 .test = alg_test_null, 1966 }, { 1967 .alg = "__driver-cbc-serpent-sse2", 1968 .test = alg_test_null, 1969 }, { 1970 .alg = "__driver-cbc-twofish-avx", 1971 .test = alg_test_null, 1972 }, { 1973 .alg = "__driver-ecb-aes-aesni", 1974 .test = alg_test_null, 1975 .fips_allowed = 1, 1976 }, { 1977 .alg = "__driver-ecb-camellia-aesni", 1978 .test = alg_test_null, 1979 }, { 1980 .alg = "__driver-ecb-camellia-aesni-avx2", 1981 .test = alg_test_null, 1982 }, { 1983 .alg = "__driver-ecb-cast5-avx", 1984 .test = alg_test_null, 1985 }, { 1986 .alg = "__driver-ecb-cast6-avx", 1987 .test = alg_test_null, 1988 }, { 1989 .alg = "__driver-ecb-serpent-avx", 1990 .test = alg_test_null, 1991 }, { 1992 .alg = "__driver-ecb-serpent-avx2", 1993 .test = alg_test_null, 1994 }, { 1995 .alg = "__driver-ecb-serpent-sse2", 1996 .test = alg_test_null, 1997 }, { 1998 .alg = "__driver-ecb-twofish-avx", 1999 .test = alg_test_null, 2000 }, { 2001 .alg = "__driver-gcm-aes-aesni", 2002 .test = alg_test_null, 2003 .fips_allowed = 1, 2004 }, { 2005 .alg = "__ghash-pclmulqdqni", 2006 .test = alg_test_null, 2007 .fips_allowed = 1, 2008 }, { 2009 .alg = "ansi_cprng", 2010 .test = alg_test_cprng, 2011 .suite = { 2012 .cprng = { 2013 .vecs = ansi_cprng_aes_tv_template, 2014 .count = ANSI_CPRNG_AES_TEST_VECTORS 2015 } 2016 } 2017 }, { 2018 .alg = "authenc(hmac(md5),ecb(cipher_null))", 2019 .test = alg_test_aead, 2020 .suite = { 2021 .aead = { 2022 .enc = { 2023 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template, 2024 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS 2025 }, 2026 .dec = { 2027 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template, 2028 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS 2029 } 2030 } 2031 } 2032 }, { 2033 .alg = "authenc(hmac(sha1),cbc(aes))", 2034 .test = alg_test_aead, 2035 .suite = { 2036 .aead = { 2037 .enc = { 2038 .vecs = 2039 hmac_sha1_aes_cbc_enc_tv_temp, 2040 .count = 2041 HMAC_SHA1_AES_CBC_ENC_TEST_VEC 2042 } 2043 } 2044 } 2045 }, { 2046 .alg = "authenc(hmac(sha1),cbc(des))", 2047 .test = alg_test_aead, 2048 .suite = { 2049 .aead = { 2050 .enc = { 2051 .vecs = 2052 hmac_sha1_des_cbc_enc_tv_temp, 2053 .count = 2054 HMAC_SHA1_DES_CBC_ENC_TEST_VEC 2055 } 2056 } 2057 } 2058 }, { 2059 .alg = "authenc(hmac(sha1),cbc(des3_ede))", 2060 .test = alg_test_aead, 2061 .fips_allowed = 1, 2062 .suite = { 2063 .aead = { 2064 .enc = { 2065 .vecs = 2066 hmac_sha1_des3_ede_cbc_enc_tv_temp, 2067 .count = 2068 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC 2069 } 2070 } 2071 } 2072 }, { 2073 .alg = 
"authenc(hmac(sha1),ctr(aes))", 2074 .test = alg_test_null, 2075 .fips_allowed = 1, 2076 }, { 2077 .alg = "authenc(hmac(sha1),ecb(cipher_null))", 2078 .test = alg_test_aead, 2079 .suite = { 2080 .aead = { 2081 .enc = { 2082 .vecs = 2083 hmac_sha1_ecb_cipher_null_enc_tv_temp, 2084 .count = 2085 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC 2086 }, 2087 .dec = { 2088 .vecs = 2089 hmac_sha1_ecb_cipher_null_dec_tv_temp, 2090 .count = 2091 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC 2092 } 2093 } 2094 } 2095 }, { 2096 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))", 2097 .test = alg_test_null, 2098 .fips_allowed = 1, 2099 }, { 2100 .alg = "authenc(hmac(sha224),cbc(des))", 2101 .test = alg_test_aead, 2102 .suite = { 2103 .aead = { 2104 .enc = { 2105 .vecs = 2106 hmac_sha224_des_cbc_enc_tv_temp, 2107 .count = 2108 HMAC_SHA224_DES_CBC_ENC_TEST_VEC 2109 } 2110 } 2111 } 2112 }, { 2113 .alg = "authenc(hmac(sha224),cbc(des3_ede))", 2114 .test = alg_test_aead, 2115 .fips_allowed = 1, 2116 .suite = { 2117 .aead = { 2118 .enc = { 2119 .vecs = 2120 hmac_sha224_des3_ede_cbc_enc_tv_temp, 2121 .count = 2122 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC 2123 } 2124 } 2125 } 2126 }, { 2127 .alg = "authenc(hmac(sha256),cbc(aes))", 2128 .test = alg_test_aead, 2129 .fips_allowed = 1, 2130 .suite = { 2131 .aead = { 2132 .enc = { 2133 .vecs = 2134 hmac_sha256_aes_cbc_enc_tv_temp, 2135 .count = 2136 HMAC_SHA256_AES_CBC_ENC_TEST_VEC 2137 } 2138 } 2139 } 2140 }, { 2141 .alg = "authenc(hmac(sha256),cbc(des))", 2142 .test = alg_test_aead, 2143 .suite = { 2144 .aead = { 2145 .enc = { 2146 .vecs = 2147 hmac_sha256_des_cbc_enc_tv_temp, 2148 .count = 2149 HMAC_SHA256_DES_CBC_ENC_TEST_VEC 2150 } 2151 } 2152 } 2153 }, { 2154 .alg = "authenc(hmac(sha256),cbc(des3_ede))", 2155 .test = alg_test_aead, 2156 .fips_allowed = 1, 2157 .suite = { 2158 .aead = { 2159 .enc = { 2160 .vecs = 2161 hmac_sha256_des3_ede_cbc_enc_tv_temp, 2162 .count = 2163 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC 2164 } 2165 } 2166 } 2167 }, { 2168 .alg = "authenc(hmac(sha256),ctr(aes))", 2169 .test = alg_test_null, 2170 .fips_allowed = 1, 2171 }, { 2172 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))", 2173 .test = alg_test_null, 2174 .fips_allowed = 1, 2175 }, { 2176 .alg = "authenc(hmac(sha384),cbc(des))", 2177 .test = alg_test_aead, 2178 .suite = { 2179 .aead = { 2180 .enc = { 2181 .vecs = 2182 hmac_sha384_des_cbc_enc_tv_temp, 2183 .count = 2184 HMAC_SHA384_DES_CBC_ENC_TEST_VEC 2185 } 2186 } 2187 } 2188 }, { 2189 .alg = "authenc(hmac(sha384),cbc(des3_ede))", 2190 .test = alg_test_aead, 2191 .fips_allowed = 1, 2192 .suite = { 2193 .aead = { 2194 .enc = { 2195 .vecs = 2196 hmac_sha384_des3_ede_cbc_enc_tv_temp, 2197 .count = 2198 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC 2199 } 2200 } 2201 } 2202 }, { 2203 .alg = "authenc(hmac(sha384),ctr(aes))", 2204 .test = alg_test_null, 2205 .fips_allowed = 1, 2206 }, { 2207 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))", 2208 .test = alg_test_null, 2209 .fips_allowed = 1, 2210 }, { 2211 .alg = "authenc(hmac(sha512),cbc(aes))", 2212 .fips_allowed = 1, 2213 .test = alg_test_aead, 2214 .suite = { 2215 .aead = { 2216 .enc = { 2217 .vecs = 2218 hmac_sha512_aes_cbc_enc_tv_temp, 2219 .count = 2220 HMAC_SHA512_AES_CBC_ENC_TEST_VEC 2221 } 2222 } 2223 } 2224 }, { 2225 .alg = "authenc(hmac(sha512),cbc(des))", 2226 .test = alg_test_aead, 2227 .suite = { 2228 .aead = { 2229 .enc = { 2230 .vecs = 2231 hmac_sha512_des_cbc_enc_tv_temp, 2232 .count = 2233 HMAC_SHA512_DES_CBC_ENC_TEST_VEC 2234 } 2235 } 2236 } 2237 }, { 2238 .alg = 
"authenc(hmac(sha512),cbc(des3_ede))", 2239 .test = alg_test_aead, 2240 .fips_allowed = 1, 2241 .suite = { 2242 .aead = { 2243 .enc = { 2244 .vecs = 2245 hmac_sha512_des3_ede_cbc_enc_tv_temp, 2246 .count = 2247 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC 2248 } 2249 } 2250 } 2251 }, { 2252 .alg = "authenc(hmac(sha512),ctr(aes))", 2253 .test = alg_test_null, 2254 .fips_allowed = 1, 2255 }, { 2256 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))", 2257 .test = alg_test_null, 2258 .fips_allowed = 1, 2259 }, { 2260 .alg = "cbc(aes)", 2261 .test = alg_test_skcipher, 2262 .fips_allowed = 1, 2263 .suite = { 2264 .cipher = { 2265 .enc = { 2266 .vecs = aes_cbc_enc_tv_template, 2267 .count = AES_CBC_ENC_TEST_VECTORS 2268 }, 2269 .dec = { 2270 .vecs = aes_cbc_dec_tv_template, 2271 .count = AES_CBC_DEC_TEST_VECTORS 2272 } 2273 } 2274 } 2275 }, { 2276 .alg = "cbc(anubis)", 2277 .test = alg_test_skcipher, 2278 .suite = { 2279 .cipher = { 2280 .enc = { 2281 .vecs = anubis_cbc_enc_tv_template, 2282 .count = ANUBIS_CBC_ENC_TEST_VECTORS 2283 }, 2284 .dec = { 2285 .vecs = anubis_cbc_dec_tv_template, 2286 .count = ANUBIS_CBC_DEC_TEST_VECTORS 2287 } 2288 } 2289 } 2290 }, { 2291 .alg = "cbc(blowfish)", 2292 .test = alg_test_skcipher, 2293 .suite = { 2294 .cipher = { 2295 .enc = { 2296 .vecs = bf_cbc_enc_tv_template, 2297 .count = BF_CBC_ENC_TEST_VECTORS 2298 }, 2299 .dec = { 2300 .vecs = bf_cbc_dec_tv_template, 2301 .count = BF_CBC_DEC_TEST_VECTORS 2302 } 2303 } 2304 } 2305 }, { 2306 .alg = "cbc(camellia)", 2307 .test = alg_test_skcipher, 2308 .suite = { 2309 .cipher = { 2310 .enc = { 2311 .vecs = camellia_cbc_enc_tv_template, 2312 .count = CAMELLIA_CBC_ENC_TEST_VECTORS 2313 }, 2314 .dec = { 2315 .vecs = camellia_cbc_dec_tv_template, 2316 .count = CAMELLIA_CBC_DEC_TEST_VECTORS 2317 } 2318 } 2319 } 2320 }, { 2321 .alg = "cbc(cast5)", 2322 .test = alg_test_skcipher, 2323 .suite = { 2324 .cipher = { 2325 .enc = { 2326 .vecs = cast5_cbc_enc_tv_template, 2327 .count = CAST5_CBC_ENC_TEST_VECTORS 2328 }, 2329 .dec = { 2330 .vecs = cast5_cbc_dec_tv_template, 2331 .count = CAST5_CBC_DEC_TEST_VECTORS 2332 } 2333 } 2334 } 2335 }, { 2336 .alg = "cbc(cast6)", 2337 .test = alg_test_skcipher, 2338 .suite = { 2339 .cipher = { 2340 .enc = { 2341 .vecs = cast6_cbc_enc_tv_template, 2342 .count = CAST6_CBC_ENC_TEST_VECTORS 2343 }, 2344 .dec = { 2345 .vecs = cast6_cbc_dec_tv_template, 2346 .count = CAST6_CBC_DEC_TEST_VECTORS 2347 } 2348 } 2349 } 2350 }, { 2351 .alg = "cbc(des)", 2352 .test = alg_test_skcipher, 2353 .suite = { 2354 .cipher = { 2355 .enc = { 2356 .vecs = des_cbc_enc_tv_template, 2357 .count = DES_CBC_ENC_TEST_VECTORS 2358 }, 2359 .dec = { 2360 .vecs = des_cbc_dec_tv_template, 2361 .count = DES_CBC_DEC_TEST_VECTORS 2362 } 2363 } 2364 } 2365 }, { 2366 .alg = "cbc(des3_ede)", 2367 .test = alg_test_skcipher, 2368 .fips_allowed = 1, 2369 .suite = { 2370 .cipher = { 2371 .enc = { 2372 .vecs = des3_ede_cbc_enc_tv_template, 2373 .count = DES3_EDE_CBC_ENC_TEST_VECTORS 2374 }, 2375 .dec = { 2376 .vecs = des3_ede_cbc_dec_tv_template, 2377 .count = DES3_EDE_CBC_DEC_TEST_VECTORS 2378 } 2379 } 2380 } 2381 }, { 2382 .alg = "cbc(serpent)", 2383 .test = alg_test_skcipher, 2384 .suite = { 2385 .cipher = { 2386 .enc = { 2387 .vecs = serpent_cbc_enc_tv_template, 2388 .count = SERPENT_CBC_ENC_TEST_VECTORS 2389 }, 2390 .dec = { 2391 .vecs = serpent_cbc_dec_tv_template, 2392 .count = SERPENT_CBC_DEC_TEST_VECTORS 2393 } 2394 } 2395 } 2396 }, { 2397 .alg = "cbc(twofish)", 2398 .test = alg_test_skcipher, 2399 .suite = { 2400 .cipher = { 2401 .enc = 
				.enc = {
					.vecs = tf_cbc_enc_tv_template,
					.count = TF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_cbc_dec_tv_template,
					.count = TF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ccm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_enc_tv_template,
					.count = AES_CCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_dec_tv_template,
					.count = AES_CCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "chacha20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = chacha20_enc_tv_template,
					.count = CHACHA20_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = chacha20_enc_tv_template,
					.count = CHACHA20_ENC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "cmac(aes)",
		.fips_allowed = 1,
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_cmac128_tv_template,
				.count = CMAC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "cmac(des3_ede)",
		.fips_allowed = 1,
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = des3_ede_cmac64_tv_template,
				.count = CMAC_DES3_EDE_TEST_VECTORS
			}
		}
	}, {
		.alg = "compress_null",
		.test = alg_test_null,
	}, {
		.alg = "crc32",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = crc32_tv_template,
				.count = CRC32_TEST_VECTORS
			}
		}
	}, {
		.alg = "crc32c",
		.test = alg_test_crc32c,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = crc32c_tv_template,
				.count = CRC32C_TEST_VECTORS
			}
		}
	}, {
		.alg = "crct10dif",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = crct10dif_tv_template,
				.count = CRCT10DIF_TEST_VECTORS
			}
		}
	}, {
		.alg = "cryptd(__driver-cbc-aes-aesni)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "cryptd(__driver-cbc-camellia-aesni)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-cbc-serpent-avx2)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-ecb-aes-aesni)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "cryptd(__driver-ecb-camellia-aesni)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-ecb-cast5-avx)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-ecb-cast6-avx)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-ecb-serpent-avx)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-ecb-serpent-avx2)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-ecb-serpent-sse2)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-ecb-twofish-avx)",
		.test = alg_test_null,
	}, {
		.alg = "cryptd(__driver-gcm-aes-aesni)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "cryptd(__ghash-pclmulqdqni)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "ctr(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_enc_tv_template,
					.count = AES_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_dec_tv_template,
					.count = AES_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_ctr_enc_tv_template,
					.count = BF_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_ctr_dec_tv_template,
					.count = BF_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_ctr_enc_tv_template,
					.count = CAMELLIA_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_ctr_dec_tv_template,
					.count = CAMELLIA_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_ctr_enc_tv_template,
					.count = CAST5_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_ctr_dec_tv_template,
					.count = CAST5_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_ctr_enc_tv_template,
					.count = CAST6_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_ctr_dec_tv_template,
					.count = CAST6_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_ctr_enc_tv_template,
					.count = DES_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_ctr_dec_tv_template,
					.count = DES_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_ctr_enc_tv_template,
					.count = DES3_EDE_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_ctr_dec_tv_template,
					.count = DES3_EDE_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_ctr_enc_tv_template,
					.count = SERPENT_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_ctr_dec_tv_template,
					.count = SERPENT_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ctr(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_ctr_enc_tv_template,
					.count = TF_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_ctr_dec_tv_template,
					.count = TF_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cts_mode_enc_tv_template,
					.count = CTS_MODE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cts_mode_dec_tv_template,
					.count = CTS_MODE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "deflate",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = deflate_comp_tv_template,
					.count = DEFLATE_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = deflate_decomp_tv_template,
					.count = DEFLATE_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "digest_null",
		.test = alg_test_null,
	}, {
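		/*
		 * DRBG: flavours that have no vectors of their own below are
		 * covered by the flavours that do (see the individual
		 * comments) and therefore only get a null test.
		 */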
		.alg = "drbg_nopr_ctr_aes128",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = {
				.vecs = drbg_nopr_ctr_aes128_tv_template,
				.count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
			}
		}
	}, {
		.alg = "drbg_nopr_ctr_aes192",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = {
				.vecs = drbg_nopr_ctr_aes192_tv_template,
				.count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
			}
		}
	}, {
		.alg = "drbg_nopr_ctr_aes256",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = {
				.vecs = drbg_nopr_ctr_aes256_tv_template,
				.count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
			}
		}
	}, {
		/*
		 * There is no need to specifically test the DRBG with every
		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
		 */
		.alg = "drbg_nopr_hmac_sha1",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_hmac_sha256",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = {
				.vecs = drbg_nopr_hmac_sha256_tv_template,
				.count =
				ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
			}
		}
	}, {
		/* covered by drbg_nopr_hmac_sha256 test */
		.alg = "drbg_nopr_hmac_sha384",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_hmac_sha512",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "drbg_nopr_sha1",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_sha256",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = {
				.vecs = drbg_nopr_sha256_tv_template,
				.count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
			}
		}
	}, {
		/* covered by drbg_nopr_sha256 test */
		.alg = "drbg_nopr_sha384",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_sha512",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_ctr_aes128",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = {
				.vecs = drbg_pr_ctr_aes128_tv_template,
				.count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
			}
		}
	}, {
		/* covered by drbg_pr_ctr_aes128 test */
		.alg = "drbg_pr_ctr_aes192",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_ctr_aes256",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_hmac_sha1",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_hmac_sha256",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = {
				.vecs = drbg_pr_hmac_sha256_tv_template,
				.count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
			}
		}
	}, {
		/* covered by drbg_pr_hmac_sha256 test */
		.alg = "drbg_pr_hmac_sha384",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_hmac_sha512",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "drbg_pr_sha1",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_sha256",
		.test = alg_test_drbg,
		.fips_allowed = 1,
		.suite = {
			.drbg = {
				.vecs = drbg_pr_sha256_tv_template,
				.count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
			}
		}
	}, {
		/* covered by drbg_pr_sha256 test */
		.alg = "drbg_pr_sha384",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_sha512",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "ecb(__aes-aesni)",
		.test = alg_test_null,
		.fips_allowed = 1,
	}, {
		.alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_enc_tv_template,
					.count = AES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_dec_tv_template,
					.count = AES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_enc_tv_template,
					.count = ANUBIS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_dec_tv_template,
					.count = ANUBIS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(arc4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = arc4_enc_tv_template,
					.count = ARC4_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = arc4_dec_tv_template,
					.count = ARC4_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_enc_tv_template,
					.count = BF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_dec_tv_template,
					.count = BF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_enc_tv_template,
					.count = CAMELLIA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_dec_tv_template,
					.count = CAMELLIA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_enc_tv_template,
					.count = CAST5_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_dec_tv_template,
					.count = CAST5_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_enc_tv_template,
					.count = CAST6_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_dec_tv_template,
					.count = CAST6_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cipher_null)",
		.test = alg_test_null,
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_enc_tv_template,
					.count = DES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_dec_tv_template,
					.count = DES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_enc_tv_template,
					.count = DES3_EDE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_dec_tv_template,
					.count = DES3_EDE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = 1
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = 1
				}
			}
		}
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = khazad_enc_tv_template,
					.count = KHAZAD_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = khazad_dec_tv_template,
					.count = KHAZAD_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = seed_enc_tv_template,
					.count = SEED_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = seed_dec_tv_template,
					.count = SEED_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_enc_tv_template,
					.count = SERPENT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_dec_tv_template,
					.count = SERPENT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tea_enc_tv_template,
					.count = TEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tea_dec_tv_template,
					.count = TEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tnepres_enc_tv_template,
					.count = TNEPRES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tnepres_dec_tv_template,
					.count = TNEPRES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_enc_tv_template,
					.count = TF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_dec_tv_template,
					.count = TF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xeta_enc_tv_template,
					.count = XETA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xeta_dec_tv_template,
					.count = XETA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xtea_enc_tv_template,
					.count = XTEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xtea_dec_tv_template,
					.count = XTEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_enc_tv_template,
					.count = AES_GCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_dec_tv_template,
					.count = AES_GCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ghash",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = ghash_tv_template,
				.count = GHASH_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(crc32)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = bfin_crc_tv_template,
				.count = BFIN_CRC_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "jitterentropy_rng",
		.fips_allowed = 1,
		.test = alg_test_null,
	}, {
		.alg = "kw(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_kw_enc_tv_template,
					.count = ARRAY_SIZE(aes_kw_enc_tv_template)
				},
				.dec = {
					.vecs = aes_kw_dec_tv_template,
					.count = ARRAY_SIZE(aes_kw_dec_tv_template)
				}
			}
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_lrw_enc_tv_template,
					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_lrw_dec_tv_template,
					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_lrw_enc_tv_template,
					.count = CAST6_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_lrw_dec_tv_template,
					.count = CAST6_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_lrw_enc_tv_template,
					.count = SERPENT_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_lrw_dec_tv_template,
					.count = SERPENT_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_lrw_enc_tv_template,
					.count = TF_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_lrw_dec_tv_template,
					.count = TF_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lz4",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lz4_comp_tv_template,
					.count = LZ4_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lz4_decomp_tv_template,
					.count = LZ4_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lz4hc",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lz4hc_comp_tv_template,
					.count = LZ4HC_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lz4hc_decomp_tv_template,
					.count = LZ4HC_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "ofb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ofb_enc_tv_template,
					.count = AES_OFB_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ofb_dec_tv_template,
					.count = AES_OFB_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "poly1305",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = poly1305_tv_template,
				.count = POLY1305_TEST_VECTORS
			}
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_rfc3686_enc_tv_template,
					.count = AES_CTR_3686_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_rfc3686_dec_tv_template,
					.count = AES_CTR_3686_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4106(gcm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4106_enc_tv_template,
					.count = AES_GCM_4106_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4106_dec_tv_template,
					.count = AES_GCM_4106_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_rfc4309_enc_tv_template,
					.count = AES_CCM_4309_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_rfc4309_dec_tv_template,
					.count = AES_CCM_4309_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4543(gcm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4543_enc_tv_template,
					.count = AES_GCM_4543_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4543_dec_tv_template,
					.count = AES_GCM_4543_DEC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "rfc7539(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = rfc7539_enc_tv_template,
					.count = RFC7539_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = rfc7539_dec_tv_template,
					.count = RFC7539_DEC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "rfc7539esp(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = rfc7539esp_enc_tv_template,
					.count = RFC7539ESP_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = rfc7539esp_dec_tv_template,
					.count = RFC7539ESP_DEC_TEST_VECTORS
				},
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		.alg = "rsa",
		.test = alg_test_akcipher,
		.fips_allowed = 1,
		.suite = {
			.akcipher = {
				.vecs = rsa_tv_template,
				.count = RSA_TEST_VECTORS
			}
		}
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr192_tv_template,
				.count = TGR192_TEST_VECTORS
			}
		}
	}, {
		.alg = "vmac(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_vmac128_tv_template,
				.count = VMAC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
			}
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_xts_enc_tv_template,
					.count = AES_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_xts_dec_tv_template,
					.count = AES_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_xts_enc_tv_template,
					.count = CAMELLIA_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_xts_dec_tv_template,
					.count = CAMELLIA_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_xts_enc_tv_template,
					.count = CAST6_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_xts_dec_tv_template,
					.count = CAST6_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_xts_enc_tv_template,
					.count = SERPENT_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_xts_dec_tv_template,
					.count = SERPENT_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_xts_enc_tv_template,
					.count = TF_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_xts_dec_tv_template,
					.count = TF_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}
};

static bool alg_test_descs_checked;

static void alg_test_descs_check_order(void)
{
	int i;

	/* only check once */
	if (alg_test_descs_checked)
		return;

	alg_test_descs_checked = true;

	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
		int diff = strcmp(alg_test_descs[i - 1].alg,
				  alg_test_descs[i].alg);

		if (WARN_ON(diff > 0)) {
			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
				alg_test_descs[i - 1].alg,
				alg_test_descs[i].alg);
		}

		if (WARN_ON(diff == 0)) {
			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
				alg_test_descs[i].alg);
		}
	}
}

/*
 * Binary search for @alg in alg_test_descs[].  This relies on the table
 * being sorted by algorithm name, which alg_test_descs_check_order()
 * verifies.  Returns the index of the matching entry, or -1 if none exists.
 */
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

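	/*
	 * Look the test up both under the generic algorithm name and under
	 * the driver name; either one may have its own entry in
	 * alg_test_descs[], and the tests for both are run when present.
	 */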
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);