1 /* 2 * Algorithm testing framework and tests. 3 * 4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> 6 * Copyright (c) 2007 Nokia Siemens Networks 7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> 8 * 9 * Updated RFC4106 AES-GCM testing. 10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com) 11 * Adrian Hoban <adrian.hoban@intel.com> 12 * Gabriele Paoloni <gabriele.paoloni@intel.com> 13 * Tadeusz Struk (tadeusz.struk@intel.com) 14 * Copyright (c) 2010, Intel Corporation. 15 * 16 * This program is free software; you can redistribute it and/or modify it 17 * under the terms of the GNU General Public License as published by the Free 18 * Software Foundation; either version 2 of the License, or (at your option) 19 * any later version. 20 * 21 */ 22 23 #include <crypto/aead.h> 24 #include <crypto/hash.h> 25 #include <crypto/skcipher.h> 26 #include <linux/err.h> 27 #include <linux/fips.h> 28 #include <linux/module.h> 29 #include <linux/scatterlist.h> 30 #include <linux/slab.h> 31 #include <linux/string.h> 32 #include <crypto/rng.h> 33 #include <crypto/drbg.h> 34 #include <crypto/akcipher.h> 35 #include <crypto/kpp.h> 36 #include <crypto/acompress.h> 37 38 #include "internal.h" 39 40 static bool notests; 41 module_param(notests, bool, 0644); 42 MODULE_PARM_DESC(notests, "disable crypto self-tests"); 43 44 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS 45 46 /* a perfect nop */ 47 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 48 { 49 return 0; 50 } 51 52 #else 53 54 #include "testmgr.h" 55 56 /* 57 * Need slab memory for testing (size in number of pages). 58 */ 59 #define XBUFSIZE 8 60 61 /* 62 * Indexes into the xbuf to simulate cross-page access. 63 */ 64 #define IDX1 32 65 #define IDX2 32400 66 #define IDX3 1511 67 #define IDX4 8193 68 #define IDX5 22222 69 #define IDX6 17101 70 #define IDX7 27333 71 #define IDX8 3000 72 73 /* 74 * Used by test_cipher() 75 */ 76 #define ENCRYPT 1 77 #define DECRYPT 0 78 79 struct tcrypt_result { 80 struct completion completion; 81 int err; 82 }; 83 84 struct aead_test_suite { 85 struct { 86 struct aead_testvec *vecs; 87 unsigned int count; 88 } enc, dec; 89 }; 90 91 struct cipher_test_suite { 92 struct { 93 struct cipher_testvec *vecs; 94 unsigned int count; 95 } enc, dec; 96 }; 97 98 struct comp_test_suite { 99 struct { 100 struct comp_testvec *vecs; 101 unsigned int count; 102 } comp, decomp; 103 }; 104 105 struct hash_test_suite { 106 struct hash_testvec *vecs; 107 unsigned int count; 108 }; 109 110 struct cprng_test_suite { 111 struct cprng_testvec *vecs; 112 unsigned int count; 113 }; 114 115 struct drbg_test_suite { 116 struct drbg_testvec *vecs; 117 unsigned int count; 118 }; 119 120 struct akcipher_test_suite { 121 struct akcipher_testvec *vecs; 122 unsigned int count; 123 }; 124 125 struct kpp_test_suite { 126 struct kpp_testvec *vecs; 127 unsigned int count; 128 }; 129 130 struct alg_test_desc { 131 const char *alg; 132 int (*test)(const struct alg_test_desc *desc, const char *driver, 133 u32 type, u32 mask); 134 int fips_allowed; /* set if alg is allowed in fips mode */ 135 136 union { 137 struct aead_test_suite aead; 138 struct cipher_test_suite cipher; 139 struct comp_test_suite comp; 140 struct hash_test_suite hash; 141 struct cprng_test_suite cprng; 142 struct drbg_test_suite drbg; 143 struct akcipher_test_suite akcipher; 144 struct kpp_test_suite kpp; 145 } suite; 146 }; 147 148 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 }; 149 150 static void hexdump(unsigned char *buf, unsigned int len) 151 { 152 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET, 153 16, 1, 154 buf, len, false); 155 } 156 157 static void tcrypt_complete(struct crypto_async_request *req, int err) 158 { 159 struct tcrypt_result *res = req->data; 160 161 if (err == -EINPROGRESS) 162 return; 163 164 res->err = err; 165 complete(&res->completion); 166 } 167 168 static int testmgr_alloc_buf(char *buf[XBUFSIZE]) 169 { 170 int i; 171 172 for (i = 0; i < XBUFSIZE; i++) { 173 buf[i] = (void *)__get_free_page(GFP_KERNEL); 174 if (!buf[i]) 175 goto err_free_buf; 176 } 177 178 return 0; 179 180 err_free_buf: 181 while (i-- > 0) 182 free_page((unsigned long)buf[i]); 183 184 return -ENOMEM; 185 } 186 187 static void testmgr_free_buf(char *buf[XBUFSIZE]) 188 { 189 int i; 190 191 for (i = 0; i < XBUFSIZE; i++) 192 free_page((unsigned long)buf[i]); 193 } 194 195 static int wait_async_op(struct tcrypt_result *tr, int ret) 196 { 197 if (ret == -EINPROGRESS || ret == -EBUSY) { 198 wait_for_completion(&tr->completion); 199 reinit_completion(&tr->completion); 200 ret = tr->err; 201 } 202 return ret; 203 } 204 205 static int ahash_partial_update(struct ahash_request **preq, 206 struct crypto_ahash *tfm, struct hash_testvec *template, 207 void *hash_buff, int k, int temp, struct scatterlist *sg, 208 const char *algo, char *result, struct tcrypt_result *tresult) 209 { 210 char *state; 211 struct ahash_request *req; 212 int statesize, ret = -EINVAL; 213 const char guard[] = { 0x00, 0xba, 0xad, 0x00 }; 214 215 req = *preq; 216 statesize = crypto_ahash_statesize( 217 crypto_ahash_reqtfm(req)); 218 state = kmalloc(statesize + sizeof(guard), GFP_KERNEL); 219 if (!state) { 220 pr_err("alt: hash: Failed to alloc state for %s\n", algo); 221 goto out_nostate; 222 } 223 memcpy(state + statesize, guard, sizeof(guard)); 224 ret = crypto_ahash_export(req, state); 225 WARN_ON(memcmp(state + statesize, guard, sizeof(guard))); 226 if (ret) { 227 pr_err("alt: hash: Failed to export() for %s\n", algo); 228 goto out; 229 } 230 ahash_request_free(req); 231 req = ahash_request_alloc(tfm, GFP_KERNEL); 232 if (!req) { 233 pr_err("alg: hash: Failed to alloc request for %s\n", algo); 234 goto out_noreq; 235 } 236 ahash_request_set_callback(req, 237 CRYPTO_TFM_REQ_MAY_BACKLOG, 238 tcrypt_complete, tresult); 239 240 memcpy(hash_buff, template->plaintext + temp, 241 template->tap[k]); 242 sg_init_one(&sg[0], hash_buff, template->tap[k]); 243 ahash_request_set_crypt(req, sg, result, template->tap[k]); 244 ret = crypto_ahash_import(req, state); 245 if (ret) { 246 pr_err("alg: hash: Failed to import() for %s\n", algo); 247 goto out; 248 } 249 ret = wait_async_op(tresult, crypto_ahash_update(req)); 250 if (ret) 251 goto out; 252 *preq = req; 253 ret = 0; 254 goto out_noreq; 255 out: 256 ahash_request_free(req); 257 out_noreq: 258 kfree(state); 259 out_nostate: 260 return ret; 261 } 262 263 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 264 unsigned int tcount, bool use_digest, 265 const int align_offset) 266 { 267 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 268 unsigned int i, j, k, temp; 269 struct scatterlist sg[8]; 270 char *result; 271 char *key; 272 struct ahash_request *req; 273 struct tcrypt_result tresult; 274 void *hash_buff; 275 char *xbuf[XBUFSIZE]; 276 int ret = -ENOMEM; 277 278 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL); 279 if (!result) 280 return ret; 281 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 282 if (!key) 283 goto out_nobuf; 284 if (testmgr_alloc_buf(xbuf)) 285 goto out_nobuf; 286 287 init_completion(&tresult.completion); 288 289 req = ahash_request_alloc(tfm, GFP_KERNEL); 290 if (!req) { 291 printk(KERN_ERR "alg: hash: Failed to allocate request for " 292 "%s\n", algo); 293 goto out_noreq; 294 } 295 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 296 tcrypt_complete, &tresult); 297 298 j = 0; 299 for (i = 0; i < tcount; i++) { 300 if (template[i].np) 301 continue; 302 303 ret = -EINVAL; 304 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) 305 goto out; 306 307 j++; 308 memset(result, 0, MAX_DIGEST_SIZE); 309 310 hash_buff = xbuf[0]; 311 hash_buff += align_offset; 312 313 memcpy(hash_buff, template[i].plaintext, template[i].psize); 314 sg_init_one(&sg[0], hash_buff, template[i].psize); 315 316 if (template[i].ksize) { 317 crypto_ahash_clear_flags(tfm, ~0); 318 if (template[i].ksize > MAX_KEYLEN) { 319 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 320 j, algo, template[i].ksize, MAX_KEYLEN); 321 ret = -EINVAL; 322 goto out; 323 } 324 memcpy(key, template[i].key, template[i].ksize); 325 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 326 if (ret) { 327 printk(KERN_ERR "alg: hash: setkey failed on " 328 "test %d for %s: ret=%d\n", j, algo, 329 -ret); 330 goto out; 331 } 332 } 333 334 ahash_request_set_crypt(req, sg, result, template[i].psize); 335 if (use_digest) { 336 ret = wait_async_op(&tresult, crypto_ahash_digest(req)); 337 if (ret) { 338 pr_err("alg: hash: digest failed on test %d " 339 "for %s: ret=%d\n", j, algo, -ret); 340 goto out; 341 } 342 } else { 343 ret = wait_async_op(&tresult, crypto_ahash_init(req)); 344 if (ret) { 345 pr_err("alt: hash: init failed on test %d " 346 "for %s: ret=%d\n", j, algo, -ret); 347 goto out; 348 } 349 ret = wait_async_op(&tresult, crypto_ahash_update(req)); 350 if (ret) { 351 pr_err("alt: hash: update failed on test %d " 352 "for %s: ret=%d\n", j, algo, -ret); 353 goto out; 354 } 355 ret = wait_async_op(&tresult, crypto_ahash_final(req)); 356 if (ret) { 357 pr_err("alt: hash: final failed on test %d " 358 "for %s: ret=%d\n", j, algo, -ret); 359 goto out; 360 } 361 } 362 363 if (memcmp(result, template[i].digest, 364 crypto_ahash_digestsize(tfm))) { 365 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 366 j, algo); 367 hexdump(result, crypto_ahash_digestsize(tfm)); 368 ret = -EINVAL; 369 goto out; 370 } 371 } 372 373 j = 0; 374 for (i = 0; i < tcount; i++) { 375 /* alignment tests are only done with continuous buffers */ 376 if (align_offset != 0) 377 break; 378 379 if (!template[i].np) 380 continue; 381 382 j++; 383 memset(result, 0, MAX_DIGEST_SIZE); 384 385 temp = 0; 386 sg_init_table(sg, template[i].np); 387 ret = -EINVAL; 388 for (k = 0; k < template[i].np; k++) { 389 if (WARN_ON(offset_in_page(IDX[k]) + 390 template[i].tap[k] > PAGE_SIZE)) 391 goto out; 392 sg_set_buf(&sg[k], 393 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + 394 offset_in_page(IDX[k]), 395 template[i].plaintext + temp, 396 template[i].tap[k]), 397 template[i].tap[k]); 398 temp += template[i].tap[k]; 399 } 400 401 if (template[i].ksize) { 402 if (template[i].ksize > MAX_KEYLEN) { 403 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 404 j, algo, template[i].ksize, MAX_KEYLEN); 405 ret = -EINVAL; 406 goto out; 407 } 408 crypto_ahash_clear_flags(tfm, ~0); 409 memcpy(key, template[i].key, template[i].ksize); 410 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 411 412 if (ret) { 413 printk(KERN_ERR "alg: hash: setkey " 414 "failed on chunking test %d " 415 "for %s: ret=%d\n", j, algo, -ret); 416 goto out; 417 } 418 } 419 420 ahash_request_set_crypt(req, sg, result, template[i].psize); 421 ret = crypto_ahash_digest(req); 422 switch (ret) { 423 case 0: 424 break; 425 case -EINPROGRESS: 426 case -EBUSY: 427 wait_for_completion(&tresult.completion); 428 reinit_completion(&tresult.completion); 429 ret = tresult.err; 430 if (!ret) 431 break; 432 /* fall through */ 433 default: 434 printk(KERN_ERR "alg: hash: digest failed " 435 "on chunking test %d for %s: " 436 "ret=%d\n", j, algo, -ret); 437 goto out; 438 } 439 440 if (memcmp(result, template[i].digest, 441 crypto_ahash_digestsize(tfm))) { 442 printk(KERN_ERR "alg: hash: Chunking test %d " 443 "failed for %s\n", j, algo); 444 hexdump(result, crypto_ahash_digestsize(tfm)); 445 ret = -EINVAL; 446 goto out; 447 } 448 } 449 450 /* partial update exercise */ 451 j = 0; 452 for (i = 0; i < tcount; i++) { 453 /* alignment tests are only done with continuous buffers */ 454 if (align_offset != 0) 455 break; 456 457 if (template[i].np < 2) 458 continue; 459 460 j++; 461 memset(result, 0, MAX_DIGEST_SIZE); 462 463 ret = -EINVAL; 464 hash_buff = xbuf[0]; 465 memcpy(hash_buff, template[i].plaintext, 466 template[i].tap[0]); 467 sg_init_one(&sg[0], hash_buff, template[i].tap[0]); 468 469 if (template[i].ksize) { 470 crypto_ahash_clear_flags(tfm, ~0); 471 if (template[i].ksize > MAX_KEYLEN) { 472 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 473 j, algo, template[i].ksize, MAX_KEYLEN); 474 ret = -EINVAL; 475 goto out; 476 } 477 memcpy(key, template[i].key, template[i].ksize); 478 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 479 if (ret) { 480 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n", 481 j, algo, -ret); 482 goto out; 483 } 484 } 485 486 ahash_request_set_crypt(req, sg, result, template[i].tap[0]); 487 ret = wait_async_op(&tresult, crypto_ahash_init(req)); 488 if (ret) { 489 pr_err("alt: hash: init failed on test %d for %s: ret=%d\n", 490 j, algo, -ret); 491 goto out; 492 } 493 ret = wait_async_op(&tresult, crypto_ahash_update(req)); 494 if (ret) { 495 pr_err("alt: hash: update failed on test %d for %s: ret=%d\n", 496 j, algo, -ret); 497 goto out; 498 } 499 500 temp = template[i].tap[0]; 501 for (k = 1; k < template[i].np; k++) { 502 ret = ahash_partial_update(&req, tfm, &template[i], 503 hash_buff, k, temp, &sg[0], algo, result, 504 &tresult); 505 if (ret) { 506 pr_err("hash: partial update failed on test %d for %s: ret=%d\n", 507 j, algo, -ret); 508 goto out_noreq; 509 } 510 temp += template[i].tap[k]; 511 } 512 ret = wait_async_op(&tresult, crypto_ahash_final(req)); 513 if (ret) { 514 pr_err("alt: hash: final failed on test %d for %s: ret=%d\n", 515 j, algo, -ret); 516 goto out; 517 } 518 if (memcmp(result, template[i].digest, 519 crypto_ahash_digestsize(tfm))) { 520 pr_err("alg: hash: Partial Test %d failed for %s\n", 521 j, algo); 522 hexdump(result, crypto_ahash_digestsize(tfm)); 523 ret = -EINVAL; 524 goto out; 525 } 526 } 527 528 ret = 0; 529 530 out: 531 ahash_request_free(req); 532 out_noreq: 533 testmgr_free_buf(xbuf); 534 out_nobuf: 535 kfree(key); 536 kfree(result); 537 return ret; 538 } 539 540 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 541 unsigned int tcount, bool use_digest) 542 { 543 unsigned int alignmask; 544 int ret; 545 546 ret = __test_hash(tfm, template, tcount, use_digest, 0); 547 if (ret) 548 return ret; 549 550 /* test unaligned buffers, check with one byte offset */ 551 ret = __test_hash(tfm, template, tcount, use_digest, 1); 552 if (ret) 553 return ret; 554 555 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 556 if (alignmask) { 557 /* Check if alignment mask for tfm is correctly set. */ 558 ret = __test_hash(tfm, template, tcount, use_digest, 559 alignmask + 1); 560 if (ret) 561 return ret; 562 } 563 564 return 0; 565 } 566 567 static int __test_aead(struct crypto_aead *tfm, int enc, 568 struct aead_testvec *template, unsigned int tcount, 569 const bool diff_dst, const int align_offset) 570 { 571 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 572 unsigned int i, j, k, n, temp; 573 int ret = -ENOMEM; 574 char *q; 575 char *key; 576 struct aead_request *req; 577 struct scatterlist *sg; 578 struct scatterlist *sgout; 579 const char *e, *d; 580 struct tcrypt_result result; 581 unsigned int authsize, iv_len; 582 void *input; 583 void *output; 584 void *assoc; 585 char *iv; 586 char *xbuf[XBUFSIZE]; 587 char *xoutbuf[XBUFSIZE]; 588 char *axbuf[XBUFSIZE]; 589 590 iv = kzalloc(MAX_IVLEN, GFP_KERNEL); 591 if (!iv) 592 return ret; 593 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 594 if (!key) 595 goto out_noxbuf; 596 if (testmgr_alloc_buf(xbuf)) 597 goto out_noxbuf; 598 if (testmgr_alloc_buf(axbuf)) 599 goto out_noaxbuf; 600 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 601 goto out_nooutbuf; 602 603 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ 604 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL); 605 if (!sg) 606 goto out_nosg; 607 sgout = &sg[16]; 608 609 if (diff_dst) 610 d = "-ddst"; 611 else 612 d = ""; 613 614 if (enc == ENCRYPT) 615 e = "encryption"; 616 else 617 e = "decryption"; 618 619 init_completion(&result.completion); 620 621 req = aead_request_alloc(tfm, GFP_KERNEL); 622 if (!req) { 623 pr_err("alg: aead%s: Failed to allocate request for %s\n", 624 d, algo); 625 goto out; 626 } 627 628 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 629 tcrypt_complete, &result); 630 631 iv_len = crypto_aead_ivsize(tfm); 632 633 for (i = 0, j = 0; i < tcount; i++) { 634 if (template[i].np) 635 continue; 636 637 j++; 638 639 /* some templates have no input data but they will 640 * touch input 641 */ 642 input = xbuf[0]; 643 input += align_offset; 644 assoc = axbuf[0]; 645 646 ret = -EINVAL; 647 if (WARN_ON(align_offset + template[i].ilen > 648 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 649 goto out; 650 651 memcpy(input, template[i].input, template[i].ilen); 652 memcpy(assoc, template[i].assoc, template[i].alen); 653 if (template[i].iv) 654 memcpy(iv, template[i].iv, iv_len); 655 else 656 memset(iv, 0, iv_len); 657 658 crypto_aead_clear_flags(tfm, ~0); 659 if (template[i].wk) 660 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 661 662 if (template[i].klen > MAX_KEYLEN) { 663 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 664 d, j, algo, template[i].klen, 665 MAX_KEYLEN); 666 ret = -EINVAL; 667 goto out; 668 } 669 memcpy(key, template[i].key, template[i].klen); 670 671 ret = crypto_aead_setkey(tfm, key, template[i].klen); 672 if (template[i].fail == !ret) { 673 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", 674 d, j, algo, crypto_aead_get_flags(tfm)); 675 goto out; 676 } else if (ret) 677 continue; 678 679 authsize = abs(template[i].rlen - template[i].ilen); 680 ret = crypto_aead_setauthsize(tfm, authsize); 681 if (ret) { 682 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", 683 d, authsize, j, algo); 684 goto out; 685 } 686 687 k = !!template[i].alen; 688 sg_init_table(sg, k + 1); 689 sg_set_buf(&sg[0], assoc, template[i].alen); 690 sg_set_buf(&sg[k], input, 691 template[i].ilen + (enc ? authsize : 0)); 692 output = input; 693 694 if (diff_dst) { 695 sg_init_table(sgout, k + 1); 696 sg_set_buf(&sgout[0], assoc, template[i].alen); 697 698 output = xoutbuf[0]; 699 output += align_offset; 700 sg_set_buf(&sgout[k], output, 701 template[i].rlen + (enc ? 0 : authsize)); 702 } 703 704 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 705 template[i].ilen, iv); 706 707 aead_request_set_ad(req, template[i].alen); 708 709 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 710 711 switch (ret) { 712 case 0: 713 if (template[i].novrfy) { 714 /* verification was supposed to fail */ 715 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 716 d, e, j, algo); 717 /* so really, we got a bad message */ 718 ret = -EBADMSG; 719 goto out; 720 } 721 break; 722 case -EINPROGRESS: 723 case -EBUSY: 724 wait_for_completion(&result.completion); 725 reinit_completion(&result.completion); 726 ret = result.err; 727 if (!ret) 728 break; 729 case -EBADMSG: 730 if (template[i].novrfy) 731 /* verification failure was expected */ 732 continue; 733 /* fall through */ 734 default: 735 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", 736 d, e, j, algo, -ret); 737 goto out; 738 } 739 740 q = output; 741 if (memcmp(q, template[i].result, template[i].rlen)) { 742 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 743 d, j, e, algo); 744 hexdump(q, template[i].rlen); 745 ret = -EINVAL; 746 goto out; 747 } 748 } 749 750 for (i = 0, j = 0; i < tcount; i++) { 751 /* alignment tests are only done with continuous buffers */ 752 if (align_offset != 0) 753 break; 754 755 if (!template[i].np) 756 continue; 757 758 j++; 759 760 if (template[i].iv) 761 memcpy(iv, template[i].iv, iv_len); 762 else 763 memset(iv, 0, MAX_IVLEN); 764 765 crypto_aead_clear_flags(tfm, ~0); 766 if (template[i].wk) 767 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 768 if (template[i].klen > MAX_KEYLEN) { 769 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 770 d, j, algo, template[i].klen, MAX_KEYLEN); 771 ret = -EINVAL; 772 goto out; 773 } 774 memcpy(key, template[i].key, template[i].klen); 775 776 ret = crypto_aead_setkey(tfm, key, template[i].klen); 777 if (template[i].fail == !ret) { 778 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 779 d, j, algo, crypto_aead_get_flags(tfm)); 780 goto out; 781 } else if (ret) 782 continue; 783 784 authsize = abs(template[i].rlen - template[i].ilen); 785 786 ret = -EINVAL; 787 sg_init_table(sg, template[i].anp + template[i].np); 788 if (diff_dst) 789 sg_init_table(sgout, template[i].anp + template[i].np); 790 791 ret = -EINVAL; 792 for (k = 0, temp = 0; k < template[i].anp; k++) { 793 if (WARN_ON(offset_in_page(IDX[k]) + 794 template[i].atap[k] > PAGE_SIZE)) 795 goto out; 796 sg_set_buf(&sg[k], 797 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 798 offset_in_page(IDX[k]), 799 template[i].assoc + temp, 800 template[i].atap[k]), 801 template[i].atap[k]); 802 if (diff_dst) 803 sg_set_buf(&sgout[k], 804 axbuf[IDX[k] >> PAGE_SHIFT] + 805 offset_in_page(IDX[k]), 806 template[i].atap[k]); 807 temp += template[i].atap[k]; 808 } 809 810 for (k = 0, temp = 0; k < template[i].np; k++) { 811 if (WARN_ON(offset_in_page(IDX[k]) + 812 template[i].tap[k] > PAGE_SIZE)) 813 goto out; 814 815 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 816 memcpy(q, template[i].input + temp, template[i].tap[k]); 817 sg_set_buf(&sg[template[i].anp + k], 818 q, template[i].tap[k]); 819 820 if (diff_dst) { 821 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 822 offset_in_page(IDX[k]); 823 824 memset(q, 0, template[i].tap[k]); 825 826 sg_set_buf(&sgout[template[i].anp + k], 827 q, template[i].tap[k]); 828 } 829 830 n = template[i].tap[k]; 831 if (k == template[i].np - 1 && enc) 832 n += authsize; 833 if (offset_in_page(q) + n < PAGE_SIZE) 834 q[n] = 0; 835 836 temp += template[i].tap[k]; 837 } 838 839 ret = crypto_aead_setauthsize(tfm, authsize); 840 if (ret) { 841 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 842 d, authsize, j, algo); 843 goto out; 844 } 845 846 if (enc) { 847 if (WARN_ON(sg[template[i].anp + k - 1].offset + 848 sg[template[i].anp + k - 1].length + 849 authsize > PAGE_SIZE)) { 850 ret = -EINVAL; 851 goto out; 852 } 853 854 if (diff_dst) 855 sgout[template[i].anp + k - 1].length += 856 authsize; 857 sg[template[i].anp + k - 1].length += authsize; 858 } 859 860 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 861 template[i].ilen, 862 iv); 863 864 aead_request_set_ad(req, template[i].alen); 865 866 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 867 868 switch (ret) { 869 case 0: 870 if (template[i].novrfy) { 871 /* verification was supposed to fail */ 872 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 873 d, e, j, algo); 874 /* so really, we got a bad message */ 875 ret = -EBADMSG; 876 goto out; 877 } 878 break; 879 case -EINPROGRESS: 880 case -EBUSY: 881 wait_for_completion(&result.completion); 882 reinit_completion(&result.completion); 883 ret = result.err; 884 if (!ret) 885 break; 886 case -EBADMSG: 887 if (template[i].novrfy) 888 /* verification failure was expected */ 889 continue; 890 /* fall through */ 891 default: 892 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", 893 d, e, j, algo, -ret); 894 goto out; 895 } 896 897 ret = -EINVAL; 898 for (k = 0, temp = 0; k < template[i].np; k++) { 899 if (diff_dst) 900 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 901 offset_in_page(IDX[k]); 902 else 903 q = xbuf[IDX[k] >> PAGE_SHIFT] + 904 offset_in_page(IDX[k]); 905 906 n = template[i].tap[k]; 907 if (k == template[i].np - 1) 908 n += enc ? authsize : -authsize; 909 910 if (memcmp(q, template[i].result + temp, n)) { 911 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 912 d, j, e, k, algo); 913 hexdump(q, n); 914 goto out; 915 } 916 917 q += n; 918 if (k == template[i].np - 1 && !enc) { 919 if (!diff_dst && 920 memcmp(q, template[i].input + 921 temp + n, authsize)) 922 n = authsize; 923 else 924 n = 0; 925 } else { 926 for (n = 0; offset_in_page(q + n) && q[n]; n++) 927 ; 928 } 929 if (n) { 930 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 931 d, j, e, k, algo, n); 932 hexdump(q, n); 933 goto out; 934 } 935 936 temp += template[i].tap[k]; 937 } 938 } 939 940 ret = 0; 941 942 out: 943 aead_request_free(req); 944 kfree(sg); 945 out_nosg: 946 if (diff_dst) 947 testmgr_free_buf(xoutbuf); 948 out_nooutbuf: 949 testmgr_free_buf(axbuf); 950 out_noaxbuf: 951 testmgr_free_buf(xbuf); 952 out_noxbuf: 953 kfree(key); 954 kfree(iv); 955 return ret; 956 } 957 958 static int test_aead(struct crypto_aead *tfm, int enc, 959 struct aead_testvec *template, unsigned int tcount) 960 { 961 unsigned int alignmask; 962 int ret; 963 964 /* test 'dst == src' case */ 965 ret = __test_aead(tfm, enc, template, tcount, false, 0); 966 if (ret) 967 return ret; 968 969 /* test 'dst != src' case */ 970 ret = __test_aead(tfm, enc, template, tcount, true, 0); 971 if (ret) 972 return ret; 973 974 /* test unaligned buffers, check with one byte offset */ 975 ret = __test_aead(tfm, enc, template, tcount, true, 1); 976 if (ret) 977 return ret; 978 979 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 980 if (alignmask) { 981 /* Check if alignment mask for tfm is correctly set. */ 982 ret = __test_aead(tfm, enc, template, tcount, true, 983 alignmask + 1); 984 if (ret) 985 return ret; 986 } 987 988 return 0; 989 } 990 991 static int test_cipher(struct crypto_cipher *tfm, int enc, 992 struct cipher_testvec *template, unsigned int tcount) 993 { 994 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 995 unsigned int i, j, k; 996 char *q; 997 const char *e; 998 void *data; 999 char *xbuf[XBUFSIZE]; 1000 int ret = -ENOMEM; 1001 1002 if (testmgr_alloc_buf(xbuf)) 1003 goto out_nobuf; 1004 1005 if (enc == ENCRYPT) 1006 e = "encryption"; 1007 else 1008 e = "decryption"; 1009 1010 j = 0; 1011 for (i = 0; i < tcount; i++) { 1012 if (template[i].np) 1013 continue; 1014 1015 if (fips_enabled && template[i].fips_skip) 1016 continue; 1017 1018 j++; 1019 1020 ret = -EINVAL; 1021 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 1022 goto out; 1023 1024 data = xbuf[0]; 1025 memcpy(data, template[i].input, template[i].ilen); 1026 1027 crypto_cipher_clear_flags(tfm, ~0); 1028 if (template[i].wk) 1029 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 1030 1031 ret = crypto_cipher_setkey(tfm, template[i].key, 1032 template[i].klen); 1033 if (template[i].fail == !ret) { 1034 printk(KERN_ERR "alg: cipher: setkey failed " 1035 "on test %d for %s: flags=%x\n", j, 1036 algo, crypto_cipher_get_flags(tfm)); 1037 goto out; 1038 } else if (ret) 1039 continue; 1040 1041 for (k = 0; k < template[i].ilen; 1042 k += crypto_cipher_blocksize(tfm)) { 1043 if (enc) 1044 crypto_cipher_encrypt_one(tfm, data + k, 1045 data + k); 1046 else 1047 crypto_cipher_decrypt_one(tfm, data + k, 1048 data + k); 1049 } 1050 1051 q = data; 1052 if (memcmp(q, template[i].result, template[i].rlen)) { 1053 printk(KERN_ERR "alg: cipher: Test %d failed " 1054 "on %s for %s\n", j, e, algo); 1055 hexdump(q, template[i].rlen); 1056 ret = -EINVAL; 1057 goto out; 1058 } 1059 } 1060 1061 ret = 0; 1062 1063 out: 1064 testmgr_free_buf(xbuf); 1065 out_nobuf: 1066 return ret; 1067 } 1068 1069 static int __test_skcipher(struct crypto_skcipher *tfm, int enc, 1070 struct cipher_testvec *template, unsigned int tcount, 1071 const bool diff_dst, const int align_offset) 1072 { 1073 const char *algo = 1074 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)); 1075 unsigned int i, j, k, n, temp; 1076 char *q; 1077 struct skcipher_request *req; 1078 struct scatterlist sg[8]; 1079 struct scatterlist sgout[8]; 1080 const char *e, *d; 1081 struct tcrypt_result result; 1082 void *data; 1083 char iv[MAX_IVLEN]; 1084 char *xbuf[XBUFSIZE]; 1085 char *xoutbuf[XBUFSIZE]; 1086 int ret = -ENOMEM; 1087 unsigned int ivsize = crypto_skcipher_ivsize(tfm); 1088 1089 if (testmgr_alloc_buf(xbuf)) 1090 goto out_nobuf; 1091 1092 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 1093 goto out_nooutbuf; 1094 1095 if (diff_dst) 1096 d = "-ddst"; 1097 else 1098 d = ""; 1099 1100 if (enc == ENCRYPT) 1101 e = "encryption"; 1102 else 1103 e = "decryption"; 1104 1105 init_completion(&result.completion); 1106 1107 req = skcipher_request_alloc(tfm, GFP_KERNEL); 1108 if (!req) { 1109 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 1110 d, algo); 1111 goto out; 1112 } 1113 1114 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1115 tcrypt_complete, &result); 1116 1117 j = 0; 1118 for (i = 0; i < tcount; i++) { 1119 if (template[i].np && !template[i].also_non_np) 1120 continue; 1121 1122 if (fips_enabled && template[i].fips_skip) 1123 continue; 1124 1125 if (template[i].iv) 1126 memcpy(iv, template[i].iv, ivsize); 1127 else 1128 memset(iv, 0, MAX_IVLEN); 1129 1130 j++; 1131 ret = -EINVAL; 1132 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE)) 1133 goto out; 1134 1135 data = xbuf[0]; 1136 data += align_offset; 1137 memcpy(data, template[i].input, template[i].ilen); 1138 1139 crypto_skcipher_clear_flags(tfm, ~0); 1140 if (template[i].wk) 1141 crypto_skcipher_set_flags(tfm, 1142 CRYPTO_TFM_REQ_WEAK_KEY); 1143 1144 ret = crypto_skcipher_setkey(tfm, template[i].key, 1145 template[i].klen); 1146 if (template[i].fail == !ret) { 1147 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 1148 d, j, algo, crypto_skcipher_get_flags(tfm)); 1149 goto out; 1150 } else if (ret) 1151 continue; 1152 1153 sg_init_one(&sg[0], data, template[i].ilen); 1154 if (diff_dst) { 1155 data = xoutbuf[0]; 1156 data += align_offset; 1157 sg_init_one(&sgout[0], data, template[i].ilen); 1158 } 1159 1160 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1161 template[i].ilen, iv); 1162 ret = enc ? crypto_skcipher_encrypt(req) : 1163 crypto_skcipher_decrypt(req); 1164 1165 switch (ret) { 1166 case 0: 1167 break; 1168 case -EINPROGRESS: 1169 case -EBUSY: 1170 wait_for_completion(&result.completion); 1171 reinit_completion(&result.completion); 1172 ret = result.err; 1173 if (!ret) 1174 break; 1175 /* fall through */ 1176 default: 1177 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 1178 d, e, j, algo, -ret); 1179 goto out; 1180 } 1181 1182 q = data; 1183 if (memcmp(q, template[i].result, template[i].rlen)) { 1184 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n", 1185 d, j, e, algo); 1186 hexdump(q, template[i].rlen); 1187 ret = -EINVAL; 1188 goto out; 1189 } 1190 1191 if (template[i].iv_out && 1192 memcmp(iv, template[i].iv_out, 1193 crypto_skcipher_ivsize(tfm))) { 1194 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n", 1195 d, j, e, algo); 1196 hexdump(iv, crypto_skcipher_ivsize(tfm)); 1197 ret = -EINVAL; 1198 goto out; 1199 } 1200 } 1201 1202 j = 0; 1203 for (i = 0; i < tcount; i++) { 1204 /* alignment tests are only done with continuous buffers */ 1205 if (align_offset != 0) 1206 break; 1207 1208 if (!template[i].np) 1209 continue; 1210 1211 if (fips_enabled && template[i].fips_skip) 1212 continue; 1213 1214 if (template[i].iv) 1215 memcpy(iv, template[i].iv, ivsize); 1216 else 1217 memset(iv, 0, MAX_IVLEN); 1218 1219 j++; 1220 crypto_skcipher_clear_flags(tfm, ~0); 1221 if (template[i].wk) 1222 crypto_skcipher_set_flags(tfm, 1223 CRYPTO_TFM_REQ_WEAK_KEY); 1224 1225 ret = crypto_skcipher_setkey(tfm, template[i].key, 1226 template[i].klen); 1227 if (template[i].fail == !ret) { 1228 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 1229 d, j, algo, crypto_skcipher_get_flags(tfm)); 1230 goto out; 1231 } else if (ret) 1232 continue; 1233 1234 temp = 0; 1235 ret = -EINVAL; 1236 sg_init_table(sg, template[i].np); 1237 if (diff_dst) 1238 sg_init_table(sgout, template[i].np); 1239 for (k = 0; k < template[i].np; k++) { 1240 if (WARN_ON(offset_in_page(IDX[k]) + 1241 template[i].tap[k] > PAGE_SIZE)) 1242 goto out; 1243 1244 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 1245 1246 memcpy(q, template[i].input + temp, template[i].tap[k]); 1247 1248 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE) 1249 q[template[i].tap[k]] = 0; 1250 1251 sg_set_buf(&sg[k], q, template[i].tap[k]); 1252 if (diff_dst) { 1253 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1254 offset_in_page(IDX[k]); 1255 1256 sg_set_buf(&sgout[k], q, template[i].tap[k]); 1257 1258 memset(q, 0, template[i].tap[k]); 1259 if (offset_in_page(q) + 1260 template[i].tap[k] < PAGE_SIZE) 1261 q[template[i].tap[k]] = 0; 1262 } 1263 1264 temp += template[i].tap[k]; 1265 } 1266 1267 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1268 template[i].ilen, iv); 1269 1270 ret = enc ? crypto_skcipher_encrypt(req) : 1271 crypto_skcipher_decrypt(req); 1272 1273 switch (ret) { 1274 case 0: 1275 break; 1276 case -EINPROGRESS: 1277 case -EBUSY: 1278 wait_for_completion(&result.completion); 1279 reinit_completion(&result.completion); 1280 ret = result.err; 1281 if (!ret) 1282 break; 1283 /* fall through */ 1284 default: 1285 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 1286 d, e, j, algo, -ret); 1287 goto out; 1288 } 1289 1290 temp = 0; 1291 ret = -EINVAL; 1292 for (k = 0; k < template[i].np; k++) { 1293 if (diff_dst) 1294 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1295 offset_in_page(IDX[k]); 1296 else 1297 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1298 offset_in_page(IDX[k]); 1299 1300 if (memcmp(q, template[i].result + temp, 1301 template[i].tap[k])) { 1302 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 1303 d, j, e, k, algo); 1304 hexdump(q, template[i].tap[k]); 1305 goto out; 1306 } 1307 1308 q += template[i].tap[k]; 1309 for (n = 0; offset_in_page(q + n) && q[n]; n++) 1310 ; 1311 if (n) { 1312 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 1313 d, j, e, k, algo, n); 1314 hexdump(q, n); 1315 goto out; 1316 } 1317 temp += template[i].tap[k]; 1318 } 1319 } 1320 1321 ret = 0; 1322 1323 out: 1324 skcipher_request_free(req); 1325 if (diff_dst) 1326 testmgr_free_buf(xoutbuf); 1327 out_nooutbuf: 1328 testmgr_free_buf(xbuf); 1329 out_nobuf: 1330 return ret; 1331 } 1332 1333 static int test_skcipher(struct crypto_skcipher *tfm, int enc, 1334 struct cipher_testvec *template, unsigned int tcount) 1335 { 1336 unsigned int alignmask; 1337 int ret; 1338 1339 /* test 'dst == src' case */ 1340 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 1341 if (ret) 1342 return ret; 1343 1344 /* test 'dst != src' case */ 1345 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 1346 if (ret) 1347 return ret; 1348 1349 /* test unaligned buffers, check with one byte offset */ 1350 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 1351 if (ret) 1352 return ret; 1353 1354 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1355 if (alignmask) { 1356 /* Check if alignment mask for tfm is correctly set. */ 1357 ret = __test_skcipher(tfm, enc, template, tcount, true, 1358 alignmask + 1); 1359 if (ret) 1360 return ret; 1361 } 1362 1363 return 0; 1364 } 1365 1366 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, 1367 struct comp_testvec *dtemplate, int ctcount, int dtcount) 1368 { 1369 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 1370 unsigned int i; 1371 char result[COMP_BUF_SIZE]; 1372 int ret; 1373 1374 for (i = 0; i < ctcount; i++) { 1375 int ilen; 1376 unsigned int dlen = COMP_BUF_SIZE; 1377 1378 memset(result, 0, sizeof (result)); 1379 1380 ilen = ctemplate[i].inlen; 1381 ret = crypto_comp_compress(tfm, ctemplate[i].input, 1382 ilen, result, &dlen); 1383 if (ret) { 1384 printk(KERN_ERR "alg: comp: compression failed " 1385 "on test %d for %s: ret=%d\n", i + 1, algo, 1386 -ret); 1387 goto out; 1388 } 1389 1390 if (dlen != ctemplate[i].outlen) { 1391 printk(KERN_ERR "alg: comp: Compression test %d " 1392 "failed for %s: output len = %d\n", i + 1, algo, 1393 dlen); 1394 ret = -EINVAL; 1395 goto out; 1396 } 1397 1398 if (memcmp(result, ctemplate[i].output, dlen)) { 1399 printk(KERN_ERR "alg: comp: Compression test %d " 1400 "failed for %s\n", i + 1, algo); 1401 hexdump(result, dlen); 1402 ret = -EINVAL; 1403 goto out; 1404 } 1405 } 1406 1407 for (i = 0; i < dtcount; i++) { 1408 int ilen; 1409 unsigned int dlen = COMP_BUF_SIZE; 1410 1411 memset(result, 0, sizeof (result)); 1412 1413 ilen = dtemplate[i].inlen; 1414 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 1415 ilen, result, &dlen); 1416 if (ret) { 1417 printk(KERN_ERR "alg: comp: decompression failed " 1418 "on test %d for %s: ret=%d\n", i + 1, algo, 1419 -ret); 1420 goto out; 1421 } 1422 1423 if (dlen != dtemplate[i].outlen) { 1424 printk(KERN_ERR "alg: comp: Decompression test %d " 1425 "failed for %s: output len = %d\n", i + 1, algo, 1426 dlen); 1427 ret = -EINVAL; 1428 goto out; 1429 } 1430 1431 if (memcmp(result, dtemplate[i].output, dlen)) { 1432 printk(KERN_ERR "alg: comp: Decompression test %d " 1433 "failed for %s\n", i + 1, algo); 1434 hexdump(result, dlen); 1435 ret = -EINVAL; 1436 goto out; 1437 } 1438 } 1439 1440 ret = 0; 1441 1442 out: 1443 return ret; 1444 } 1445 1446 static int test_acomp(struct crypto_acomp *tfm, struct comp_testvec *ctemplate, 1447 struct comp_testvec *dtemplate, int ctcount, int dtcount) 1448 { 1449 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm)); 1450 unsigned int i; 1451 char *output; 1452 int ret; 1453 struct scatterlist src, dst; 1454 struct acomp_req *req; 1455 struct tcrypt_result result; 1456 1457 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 1458 if (!output) 1459 return -ENOMEM; 1460 1461 for (i = 0; i < ctcount; i++) { 1462 unsigned int dlen = COMP_BUF_SIZE; 1463 int ilen = ctemplate[i].inlen; 1464 1465 memset(output, 0, dlen); 1466 init_completion(&result.completion); 1467 sg_init_one(&src, ctemplate[i].input, ilen); 1468 sg_init_one(&dst, output, dlen); 1469 1470 req = acomp_request_alloc(tfm); 1471 if (!req) { 1472 pr_err("alg: acomp: request alloc failed for %s\n", 1473 algo); 1474 ret = -ENOMEM; 1475 goto out; 1476 } 1477 1478 acomp_request_set_params(req, &src, &dst, ilen, dlen); 1479 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1480 tcrypt_complete, &result); 1481 1482 ret = wait_async_op(&result, crypto_acomp_compress(req)); 1483 if (ret) { 1484 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n", 1485 i + 1, algo, -ret); 1486 acomp_request_free(req); 1487 goto out; 1488 } 1489 1490 if (req->dlen != ctemplate[i].outlen) { 1491 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n", 1492 i + 1, algo, req->dlen); 1493 ret = -EINVAL; 1494 acomp_request_free(req); 1495 goto out; 1496 } 1497 1498 if (memcmp(output, ctemplate[i].output, req->dlen)) { 1499 pr_err("alg: acomp: Compression test %d failed for %s\n", 1500 i + 1, algo); 1501 hexdump(output, req->dlen); 1502 ret = -EINVAL; 1503 acomp_request_free(req); 1504 goto out; 1505 } 1506 1507 acomp_request_free(req); 1508 } 1509 1510 for (i = 0; i < dtcount; i++) { 1511 unsigned int dlen = COMP_BUF_SIZE; 1512 int ilen = dtemplate[i].inlen; 1513 1514 memset(output, 0, dlen); 1515 init_completion(&result.completion); 1516 sg_init_one(&src, dtemplate[i].input, ilen); 1517 sg_init_one(&dst, output, dlen); 1518 1519 req = acomp_request_alloc(tfm); 1520 if (!req) { 1521 pr_err("alg: acomp: request alloc failed for %s\n", 1522 algo); 1523 ret = -ENOMEM; 1524 goto out; 1525 } 1526 1527 acomp_request_set_params(req, &src, &dst, ilen, dlen); 1528 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1529 tcrypt_complete, &result); 1530 1531 ret = wait_async_op(&result, crypto_acomp_decompress(req)); 1532 if (ret) { 1533 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n", 1534 i + 1, algo, -ret); 1535 acomp_request_free(req); 1536 goto out; 1537 } 1538 1539 if (req->dlen != dtemplate[i].outlen) { 1540 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n", 1541 i + 1, algo, req->dlen); 1542 ret = -EINVAL; 1543 acomp_request_free(req); 1544 goto out; 1545 } 1546 1547 if (memcmp(output, dtemplate[i].output, req->dlen)) { 1548 pr_err("alg: acomp: Decompression test %d failed for %s\n", 1549 i + 1, algo); 1550 hexdump(output, req->dlen); 1551 ret = -EINVAL; 1552 acomp_request_free(req); 1553 goto out; 1554 } 1555 1556 acomp_request_free(req); 1557 } 1558 1559 ret = 0; 1560 1561 out: 1562 kfree(output); 1563 return ret; 1564 } 1565 1566 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template, 1567 unsigned int tcount) 1568 { 1569 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1570 int err = 0, i, j, seedsize; 1571 u8 *seed; 1572 char result[32]; 1573 1574 seedsize = crypto_rng_seedsize(tfm); 1575 1576 seed = kmalloc(seedsize, GFP_KERNEL); 1577 if (!seed) { 1578 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1579 "for %s\n", algo); 1580 return -ENOMEM; 1581 } 1582 1583 for (i = 0; i < tcount; i++) { 1584 memset(result, 0, 32); 1585 1586 memcpy(seed, template[i].v, template[i].vlen); 1587 memcpy(seed + template[i].vlen, template[i].key, 1588 template[i].klen); 1589 memcpy(seed + template[i].vlen + template[i].klen, 1590 template[i].dt, template[i].dtlen); 1591 1592 err = crypto_rng_reset(tfm, seed, seedsize); 1593 if (err) { 1594 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1595 "for %s\n", algo); 1596 goto out; 1597 } 1598 1599 for (j = 0; j < template[i].loops; j++) { 1600 err = crypto_rng_get_bytes(tfm, result, 1601 template[i].rlen); 1602 if (err < 0) { 1603 printk(KERN_ERR "alg: cprng: Failed to obtain " 1604 "the correct amount of random data for " 1605 "%s (requested %d)\n", algo, 1606 template[i].rlen); 1607 goto out; 1608 } 1609 } 1610 1611 err = memcmp(result, template[i].result, 1612 template[i].rlen); 1613 if (err) { 1614 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1615 i, algo); 1616 hexdump(result, template[i].rlen); 1617 err = -EINVAL; 1618 goto out; 1619 } 1620 } 1621 1622 out: 1623 kfree(seed); 1624 return err; 1625 } 1626 1627 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1628 u32 type, u32 mask) 1629 { 1630 struct crypto_aead *tfm; 1631 int err = 0; 1632 1633 tfm = crypto_alloc_aead(driver, type, mask); 1634 if (IS_ERR(tfm)) { 1635 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1636 "%ld\n", driver, PTR_ERR(tfm)); 1637 return PTR_ERR(tfm); 1638 } 1639 1640 if (desc->suite.aead.enc.vecs) { 1641 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1642 desc->suite.aead.enc.count); 1643 if (err) 1644 goto out; 1645 } 1646 1647 if (!err && desc->suite.aead.dec.vecs) 1648 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1649 desc->suite.aead.dec.count); 1650 1651 out: 1652 crypto_free_aead(tfm); 1653 return err; 1654 } 1655 1656 static int alg_test_cipher(const struct alg_test_desc *desc, 1657 const char *driver, u32 type, u32 mask) 1658 { 1659 struct crypto_cipher *tfm; 1660 int err = 0; 1661 1662 tfm = crypto_alloc_cipher(driver, type, mask); 1663 if (IS_ERR(tfm)) { 1664 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1665 "%s: %ld\n", driver, PTR_ERR(tfm)); 1666 return PTR_ERR(tfm); 1667 } 1668 1669 if (desc->suite.cipher.enc.vecs) { 1670 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1671 desc->suite.cipher.enc.count); 1672 if (err) 1673 goto out; 1674 } 1675 1676 if (desc->suite.cipher.dec.vecs) 1677 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1678 desc->suite.cipher.dec.count); 1679 1680 out: 1681 crypto_free_cipher(tfm); 1682 return err; 1683 } 1684 1685 static int alg_test_skcipher(const struct alg_test_desc *desc, 1686 const char *driver, u32 type, u32 mask) 1687 { 1688 struct crypto_skcipher *tfm; 1689 int err = 0; 1690 1691 tfm = crypto_alloc_skcipher(driver, type, mask); 1692 if (IS_ERR(tfm)) { 1693 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1694 "%s: %ld\n", driver, PTR_ERR(tfm)); 1695 return PTR_ERR(tfm); 1696 } 1697 1698 if (desc->suite.cipher.enc.vecs) { 1699 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1700 desc->suite.cipher.enc.count); 1701 if (err) 1702 goto out; 1703 } 1704 1705 if (desc->suite.cipher.dec.vecs) 1706 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1707 desc->suite.cipher.dec.count); 1708 1709 out: 1710 crypto_free_skcipher(tfm); 1711 return err; 1712 } 1713 1714 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1715 u32 type, u32 mask) 1716 { 1717 struct crypto_comp *comp; 1718 struct crypto_acomp *acomp; 1719 int err; 1720 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK; 1721 1722 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) { 1723 acomp = crypto_alloc_acomp(driver, type, mask); 1724 if (IS_ERR(acomp)) { 1725 pr_err("alg: acomp: Failed to load transform for %s: %ld\n", 1726 driver, PTR_ERR(acomp)); 1727 return PTR_ERR(acomp); 1728 } 1729 err = test_acomp(acomp, desc->suite.comp.comp.vecs, 1730 desc->suite.comp.decomp.vecs, 1731 desc->suite.comp.comp.count, 1732 desc->suite.comp.decomp.count); 1733 crypto_free_acomp(acomp); 1734 } else { 1735 comp = crypto_alloc_comp(driver, type, mask); 1736 if (IS_ERR(comp)) { 1737 pr_err("alg: comp: Failed to load transform for %s: %ld\n", 1738 driver, PTR_ERR(comp)); 1739 return PTR_ERR(comp); 1740 } 1741 1742 err = test_comp(comp, desc->suite.comp.comp.vecs, 1743 desc->suite.comp.decomp.vecs, 1744 desc->suite.comp.comp.count, 1745 desc->suite.comp.decomp.count); 1746 1747 crypto_free_comp(comp); 1748 } 1749 return err; 1750 } 1751 1752 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1753 u32 type, u32 mask) 1754 { 1755 struct crypto_ahash *tfm; 1756 int err; 1757 1758 tfm = crypto_alloc_ahash(driver, type, mask); 1759 if (IS_ERR(tfm)) { 1760 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1761 "%ld\n", driver, PTR_ERR(tfm)); 1762 return PTR_ERR(tfm); 1763 } 1764 1765 err = test_hash(tfm, desc->suite.hash.vecs, 1766 desc->suite.hash.count, true); 1767 if (!err) 1768 err = test_hash(tfm, desc->suite.hash.vecs, 1769 desc->suite.hash.count, false); 1770 1771 crypto_free_ahash(tfm); 1772 return err; 1773 } 1774 1775 static int alg_test_crc32c(const struct alg_test_desc *desc, 1776 const char *driver, u32 type, u32 mask) 1777 { 1778 struct crypto_shash *tfm; 1779 u32 val; 1780 int err; 1781 1782 err = alg_test_hash(desc, driver, type, mask); 1783 if (err) 1784 goto out; 1785 1786 tfm = crypto_alloc_shash(driver, type, mask); 1787 if (IS_ERR(tfm)) { 1788 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1789 "%ld\n", driver, PTR_ERR(tfm)); 1790 err = PTR_ERR(tfm); 1791 goto out; 1792 } 1793 1794 do { 1795 SHASH_DESC_ON_STACK(shash, tfm); 1796 u32 *ctx = (u32 *)shash_desc_ctx(shash); 1797 1798 shash->tfm = tfm; 1799 shash->flags = 0; 1800 1801 *ctx = le32_to_cpu(420553207); 1802 err = crypto_shash_final(shash, (u8 *)&val); 1803 if (err) { 1804 printk(KERN_ERR "alg: crc32c: Operation failed for " 1805 "%s: %d\n", driver, err); 1806 break; 1807 } 1808 1809 if (val != ~420553207) { 1810 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1811 "%d\n", driver, val); 1812 err = -EINVAL; 1813 } 1814 } while (0); 1815 1816 crypto_free_shash(tfm); 1817 1818 out: 1819 return err; 1820 } 1821 1822 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1823 u32 type, u32 mask) 1824 { 1825 struct crypto_rng *rng; 1826 int err; 1827 1828 rng = crypto_alloc_rng(driver, type, mask); 1829 if (IS_ERR(rng)) { 1830 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1831 "%ld\n", driver, PTR_ERR(rng)); 1832 return PTR_ERR(rng); 1833 } 1834 1835 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1836 1837 crypto_free_rng(rng); 1838 1839 return err; 1840 } 1841 1842 1843 static int drbg_cavs_test(struct drbg_testvec *test, int pr, 1844 const char *driver, u32 type, u32 mask) 1845 { 1846 int ret = -EAGAIN; 1847 struct crypto_rng *drng; 1848 struct drbg_test_data test_data; 1849 struct drbg_string addtl, pers, testentropy; 1850 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL); 1851 1852 if (!buf) 1853 return -ENOMEM; 1854 1855 drng = crypto_alloc_rng(driver, type, mask); 1856 if (IS_ERR(drng)) { 1857 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for " 1858 "%s\n", driver); 1859 kzfree(buf); 1860 return -ENOMEM; 1861 } 1862 1863 test_data.testentropy = &testentropy; 1864 drbg_string_fill(&testentropy, test->entropy, test->entropylen); 1865 drbg_string_fill(&pers, test->pers, test->perslen); 1866 ret = crypto_drbg_reset_test(drng, &pers, &test_data); 1867 if (ret) { 1868 printk(KERN_ERR "alg: drbg: Failed to reset rng\n"); 1869 goto outbuf; 1870 } 1871 1872 drbg_string_fill(&addtl, test->addtla, test->addtllen); 1873 if (pr) { 1874 drbg_string_fill(&testentropy, test->entpra, test->entprlen); 1875 ret = crypto_drbg_get_bytes_addtl_test(drng, 1876 buf, test->expectedlen, &addtl, &test_data); 1877 } else { 1878 ret = crypto_drbg_get_bytes_addtl(drng, 1879 buf, test->expectedlen, &addtl); 1880 } 1881 if (ret < 0) { 1882 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1883 "driver %s\n", driver); 1884 goto outbuf; 1885 } 1886 1887 drbg_string_fill(&addtl, test->addtlb, test->addtllen); 1888 if (pr) { 1889 drbg_string_fill(&testentropy, test->entprb, test->entprlen); 1890 ret = crypto_drbg_get_bytes_addtl_test(drng, 1891 buf, test->expectedlen, &addtl, &test_data); 1892 } else { 1893 ret = crypto_drbg_get_bytes_addtl(drng, 1894 buf, test->expectedlen, &addtl); 1895 } 1896 if (ret < 0) { 1897 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1898 "driver %s\n", driver); 1899 goto outbuf; 1900 } 1901 1902 ret = memcmp(test->expected, buf, test->expectedlen); 1903 1904 outbuf: 1905 crypto_free_rng(drng); 1906 kzfree(buf); 1907 return ret; 1908 } 1909 1910 1911 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver, 1912 u32 type, u32 mask) 1913 { 1914 int err = 0; 1915 int pr = 0; 1916 int i = 0; 1917 struct drbg_testvec *template = desc->suite.drbg.vecs; 1918 unsigned int tcount = desc->suite.drbg.count; 1919 1920 if (0 == memcmp(driver, "drbg_pr_", 8)) 1921 pr = 1; 1922 1923 for (i = 0; i < tcount; i++) { 1924 err = drbg_cavs_test(&template[i], pr, driver, type, mask); 1925 if (err) { 1926 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n", 1927 i, driver); 1928 err = -EINVAL; 1929 break; 1930 } 1931 } 1932 return err; 1933 1934 } 1935 1936 static int do_test_kpp(struct crypto_kpp *tfm, struct kpp_testvec *vec, 1937 const char *alg) 1938 { 1939 struct kpp_request *req; 1940 void *input_buf = NULL; 1941 void *output_buf = NULL; 1942 struct tcrypt_result result; 1943 unsigned int out_len_max; 1944 int err = -ENOMEM; 1945 struct scatterlist src, dst; 1946 1947 req = kpp_request_alloc(tfm, GFP_KERNEL); 1948 if (!req) 1949 return err; 1950 1951 init_completion(&result.completion); 1952 1953 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size); 1954 if (err < 0) 1955 goto free_req; 1956 1957 out_len_max = crypto_kpp_maxsize(tfm); 1958 output_buf = kzalloc(out_len_max, GFP_KERNEL); 1959 if (!output_buf) { 1960 err = -ENOMEM; 1961 goto free_req; 1962 } 1963 1964 /* Use appropriate parameter as base */ 1965 kpp_request_set_input(req, NULL, 0); 1966 sg_init_one(&dst, output_buf, out_len_max); 1967 kpp_request_set_output(req, &dst, out_len_max); 1968 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1969 tcrypt_complete, &result); 1970 1971 /* Compute public key */ 1972 err = wait_async_op(&result, crypto_kpp_generate_public_key(req)); 1973 if (err) { 1974 pr_err("alg: %s: generate public key test failed. err %d\n", 1975 alg, err); 1976 goto free_output; 1977 } 1978 /* Verify calculated public key */ 1979 if (memcmp(vec->expected_a_public, sg_virt(req->dst), 1980 vec->expected_a_public_size)) { 1981 pr_err("alg: %s: generate public key test failed. Invalid output\n", 1982 alg); 1983 err = -EINVAL; 1984 goto free_output; 1985 } 1986 1987 /* Calculate shared secret key by using counter part (b) public key. */ 1988 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL); 1989 if (!input_buf) { 1990 err = -ENOMEM; 1991 goto free_output; 1992 } 1993 1994 memcpy(input_buf, vec->b_public, vec->b_public_size); 1995 sg_init_one(&src, input_buf, vec->b_public_size); 1996 sg_init_one(&dst, output_buf, out_len_max); 1997 kpp_request_set_input(req, &src, vec->b_public_size); 1998 kpp_request_set_output(req, &dst, out_len_max); 1999 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2000 tcrypt_complete, &result); 2001 err = wait_async_op(&result, crypto_kpp_compute_shared_secret(req)); 2002 if (err) { 2003 pr_err("alg: %s: compute shard secret test failed. err %d\n", 2004 alg, err); 2005 goto free_all; 2006 } 2007 /* 2008 * verify shared secret from which the user will derive 2009 * secret key by executing whatever hash it has chosen 2010 */ 2011 if (memcmp(vec->expected_ss, sg_virt(req->dst), 2012 vec->expected_ss_size)) { 2013 pr_err("alg: %s: compute shared secret test failed. Invalid output\n", 2014 alg); 2015 err = -EINVAL; 2016 } 2017 2018 free_all: 2019 kfree(input_buf); 2020 free_output: 2021 kfree(output_buf); 2022 free_req: 2023 kpp_request_free(req); 2024 return err; 2025 } 2026 2027 static int test_kpp(struct crypto_kpp *tfm, const char *alg, 2028 struct kpp_testvec *vecs, unsigned int tcount) 2029 { 2030 int ret, i; 2031 2032 for (i = 0; i < tcount; i++) { 2033 ret = do_test_kpp(tfm, vecs++, alg); 2034 if (ret) { 2035 pr_err("alg: %s: test failed on vector %d, err=%d\n", 2036 alg, i + 1, ret); 2037 return ret; 2038 } 2039 } 2040 return 0; 2041 } 2042 2043 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver, 2044 u32 type, u32 mask) 2045 { 2046 struct crypto_kpp *tfm; 2047 int err = 0; 2048 2049 tfm = crypto_alloc_kpp(driver, type, mask); 2050 if (IS_ERR(tfm)) { 2051 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n", 2052 driver, PTR_ERR(tfm)); 2053 return PTR_ERR(tfm); 2054 } 2055 if (desc->suite.kpp.vecs) 2056 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs, 2057 desc->suite.kpp.count); 2058 2059 crypto_free_kpp(tfm); 2060 return err; 2061 } 2062 2063 static int test_akcipher_one(struct crypto_akcipher *tfm, 2064 struct akcipher_testvec *vecs) 2065 { 2066 char *xbuf[XBUFSIZE]; 2067 struct akcipher_request *req; 2068 void *outbuf_enc = NULL; 2069 void *outbuf_dec = NULL; 2070 struct tcrypt_result result; 2071 unsigned int out_len_max, out_len = 0; 2072 int err = -ENOMEM; 2073 struct scatterlist src, dst, src_tab[2]; 2074 2075 if (testmgr_alloc_buf(xbuf)) 2076 return err; 2077 2078 req = akcipher_request_alloc(tfm, GFP_KERNEL); 2079 if (!req) 2080 goto free_xbuf; 2081 2082 init_completion(&result.completion); 2083 2084 if (vecs->public_key_vec) 2085 err = crypto_akcipher_set_pub_key(tfm, vecs->key, 2086 vecs->key_len); 2087 else 2088 err = crypto_akcipher_set_priv_key(tfm, vecs->key, 2089 vecs->key_len); 2090 if (err) 2091 goto free_req; 2092 2093 err = -ENOMEM; 2094 out_len_max = crypto_akcipher_maxsize(tfm); 2095 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL); 2096 if (!outbuf_enc) 2097 goto free_req; 2098 2099 if (WARN_ON(vecs->m_size > PAGE_SIZE)) 2100 goto free_all; 2101 2102 memcpy(xbuf[0], vecs->m, vecs->m_size); 2103 2104 sg_init_table(src_tab, 2); 2105 sg_set_buf(&src_tab[0], xbuf[0], 8); 2106 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8); 2107 sg_init_one(&dst, outbuf_enc, out_len_max); 2108 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size, 2109 out_len_max); 2110 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2111 tcrypt_complete, &result); 2112 2113 /* Run RSA encrypt - c = m^e mod n;*/ 2114 err = wait_async_op(&result, crypto_akcipher_encrypt(req)); 2115 if (err) { 2116 pr_err("alg: akcipher: encrypt test failed. err %d\n", err); 2117 goto free_all; 2118 } 2119 if (req->dst_len != vecs->c_size) { 2120 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n"); 2121 err = -EINVAL; 2122 goto free_all; 2123 } 2124 /* verify that encrypted message is equal to expected */ 2125 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) { 2126 pr_err("alg: akcipher: encrypt test failed. Invalid output\n"); 2127 hexdump(outbuf_enc, vecs->c_size); 2128 err = -EINVAL; 2129 goto free_all; 2130 } 2131 /* Don't invoke decrypt for vectors with public key */ 2132 if (vecs->public_key_vec) { 2133 err = 0; 2134 goto free_all; 2135 } 2136 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL); 2137 if (!outbuf_dec) { 2138 err = -ENOMEM; 2139 goto free_all; 2140 } 2141 2142 if (WARN_ON(vecs->c_size > PAGE_SIZE)) 2143 goto free_all; 2144 2145 memcpy(xbuf[0], vecs->c, vecs->c_size); 2146 2147 sg_init_one(&src, xbuf[0], vecs->c_size); 2148 sg_init_one(&dst, outbuf_dec, out_len_max); 2149 init_completion(&result.completion); 2150 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max); 2151 2152 /* Run RSA decrypt - m = c^d mod n;*/ 2153 err = wait_async_op(&result, crypto_akcipher_decrypt(req)); 2154 if (err) { 2155 pr_err("alg: akcipher: decrypt test failed. err %d\n", err); 2156 goto free_all; 2157 } 2158 out_len = req->dst_len; 2159 if (out_len < vecs->m_size) { 2160 pr_err("alg: akcipher: decrypt test failed. " 2161 "Invalid output len %u\n", out_len); 2162 err = -EINVAL; 2163 goto free_all; 2164 } 2165 /* verify that decrypted message is equal to the original msg */ 2166 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) || 2167 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size, 2168 vecs->m_size)) { 2169 pr_err("alg: akcipher: decrypt test failed. Invalid output\n"); 2170 hexdump(outbuf_dec, out_len); 2171 err = -EINVAL; 2172 } 2173 free_all: 2174 kfree(outbuf_dec); 2175 kfree(outbuf_enc); 2176 free_req: 2177 akcipher_request_free(req); 2178 free_xbuf: 2179 testmgr_free_buf(xbuf); 2180 return err; 2181 } 2182 2183 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg, 2184 struct akcipher_testvec *vecs, unsigned int tcount) 2185 { 2186 const char *algo = 2187 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm)); 2188 int ret, i; 2189 2190 for (i = 0; i < tcount; i++) { 2191 ret = test_akcipher_one(tfm, vecs++); 2192 if (!ret) 2193 continue; 2194 2195 pr_err("alg: akcipher: test %d failed for %s, err=%d\n", 2196 i + 1, algo, ret); 2197 return ret; 2198 } 2199 return 0; 2200 } 2201 2202 static int alg_test_akcipher(const struct alg_test_desc *desc, 2203 const char *driver, u32 type, u32 mask) 2204 { 2205 struct crypto_akcipher *tfm; 2206 int err = 0; 2207 2208 tfm = crypto_alloc_akcipher(driver, type, mask); 2209 if (IS_ERR(tfm)) { 2210 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n", 2211 driver, PTR_ERR(tfm)); 2212 return PTR_ERR(tfm); 2213 } 2214 if (desc->suite.akcipher.vecs) 2215 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs, 2216 desc->suite.akcipher.count); 2217 2218 crypto_free_akcipher(tfm); 2219 return err; 2220 } 2221 2222 static int alg_test_null(const struct alg_test_desc *desc, 2223 const char *driver, u32 type, u32 mask) 2224 { 2225 return 0; 2226 } 2227 2228 /* Please keep this list sorted by algorithm name. */ 2229 static const struct alg_test_desc alg_test_descs[] = { 2230 { 2231 .alg = "ansi_cprng", 2232 .test = alg_test_cprng, 2233 .suite = { 2234 .cprng = { 2235 .vecs = ansi_cprng_aes_tv_template, 2236 .count = ANSI_CPRNG_AES_TEST_VECTORS 2237 } 2238 } 2239 }, { 2240 .alg = "authenc(hmac(md5),ecb(cipher_null))", 2241 .test = alg_test_aead, 2242 .suite = { 2243 .aead = { 2244 .enc = { 2245 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template, 2246 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS 2247 }, 2248 .dec = { 2249 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template, 2250 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS 2251 } 2252 } 2253 } 2254 }, { 2255 .alg = "authenc(hmac(sha1),cbc(aes))", 2256 .test = alg_test_aead, 2257 .suite = { 2258 .aead = { 2259 .enc = { 2260 .vecs = 2261 hmac_sha1_aes_cbc_enc_tv_temp, 2262 .count = 2263 HMAC_SHA1_AES_CBC_ENC_TEST_VEC 2264 } 2265 } 2266 } 2267 }, { 2268 .alg = "authenc(hmac(sha1),cbc(des))", 2269 .test = alg_test_aead, 2270 .suite = { 2271 .aead = { 2272 .enc = { 2273 .vecs = 2274 hmac_sha1_des_cbc_enc_tv_temp, 2275 .count = 2276 HMAC_SHA1_DES_CBC_ENC_TEST_VEC 2277 } 2278 } 2279 } 2280 }, { 2281 .alg = "authenc(hmac(sha1),cbc(des3_ede))", 2282 .test = alg_test_aead, 2283 .fips_allowed = 1, 2284 .suite = { 2285 .aead = { 2286 .enc = { 2287 .vecs = 2288 hmac_sha1_des3_ede_cbc_enc_tv_temp, 2289 .count = 2290 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC 2291 } 2292 } 2293 } 2294 }, { 2295 .alg = "authenc(hmac(sha1),ctr(aes))", 2296 .test = alg_test_null, 2297 .fips_allowed = 1, 2298 }, { 2299 .alg = "authenc(hmac(sha1),ecb(cipher_null))", 2300 .test = alg_test_aead, 2301 .suite = { 2302 .aead = { 2303 .enc = { 2304 .vecs = 2305 hmac_sha1_ecb_cipher_null_enc_tv_temp, 2306 .count = 2307 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC 2308 }, 2309 .dec = { 2310 .vecs = 2311 hmac_sha1_ecb_cipher_null_dec_tv_temp, 2312 .count = 2313 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC 2314 } 2315 } 2316 } 2317 }, { 2318 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))", 2319 .test = alg_test_null, 2320 .fips_allowed = 1, 2321 }, { 2322 .alg = "authenc(hmac(sha224),cbc(des))", 2323 .test = alg_test_aead, 2324 .suite = { 2325 .aead = { 2326 .enc = { 2327 .vecs = 2328 hmac_sha224_des_cbc_enc_tv_temp, 2329 .count = 2330 HMAC_SHA224_DES_CBC_ENC_TEST_VEC 2331 } 2332 } 2333 } 2334 }, { 2335 .alg = "authenc(hmac(sha224),cbc(des3_ede))", 2336 .test = alg_test_aead, 2337 .fips_allowed = 1, 2338 .suite = { 2339 .aead = { 2340 .enc = { 2341 .vecs = 2342 hmac_sha224_des3_ede_cbc_enc_tv_temp, 2343 .count = 2344 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC 2345 } 2346 } 2347 } 2348 }, { 2349 .alg = "authenc(hmac(sha256),cbc(aes))", 2350 .test = alg_test_aead, 2351 .fips_allowed = 1, 2352 .suite = { 2353 .aead = { 2354 .enc = { 2355 .vecs = 2356 hmac_sha256_aes_cbc_enc_tv_temp, 2357 .count = 2358 HMAC_SHA256_AES_CBC_ENC_TEST_VEC 2359 } 2360 } 2361 } 2362 }, { 2363 .alg = "authenc(hmac(sha256),cbc(des))", 2364 .test = alg_test_aead, 2365 .suite = { 2366 .aead = { 2367 .enc = { 2368 .vecs = 2369 hmac_sha256_des_cbc_enc_tv_temp, 2370 .count = 2371 HMAC_SHA256_DES_CBC_ENC_TEST_VEC 2372 } 2373 } 2374 } 2375 }, { 2376 .alg = "authenc(hmac(sha256),cbc(des3_ede))", 2377 .test = alg_test_aead, 2378 .fips_allowed = 1, 2379 .suite = { 2380 .aead = { 2381 .enc = { 2382 .vecs = 2383 hmac_sha256_des3_ede_cbc_enc_tv_temp, 2384 .count = 2385 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC 2386 } 2387 } 2388 } 2389 }, { 2390 .alg = "authenc(hmac(sha256),ctr(aes))", 2391 .test = alg_test_null, 2392 .fips_allowed = 1, 2393 }, { 2394 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))", 2395 .test = alg_test_null, 2396 .fips_allowed = 1, 2397 }, { 2398 .alg = "authenc(hmac(sha384),cbc(des))", 2399 .test = alg_test_aead, 2400 .suite = { 2401 .aead = { 2402 .enc = { 2403 .vecs = 2404 hmac_sha384_des_cbc_enc_tv_temp, 2405 .count = 2406 HMAC_SHA384_DES_CBC_ENC_TEST_VEC 2407 } 2408 } 2409 } 2410 }, { 2411 .alg = "authenc(hmac(sha384),cbc(des3_ede))", 2412 .test = alg_test_aead, 2413 .fips_allowed = 1, 2414 .suite = { 2415 .aead = { 2416 .enc = { 2417 .vecs = 2418 hmac_sha384_des3_ede_cbc_enc_tv_temp, 2419 .count = 2420 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC 2421 } 2422 } 2423 } 2424 }, { 2425 .alg = "authenc(hmac(sha384),ctr(aes))", 2426 .test = alg_test_null, 2427 .fips_allowed = 1, 2428 }, { 2429 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))", 2430 .test = alg_test_null, 2431 .fips_allowed = 1, 2432 }, { 2433 .alg = "authenc(hmac(sha512),cbc(aes))", 2434 .fips_allowed = 1, 2435 .test = alg_test_aead, 2436 .suite = { 2437 .aead = { 2438 .enc = { 2439 .vecs = 2440 hmac_sha512_aes_cbc_enc_tv_temp, 2441 .count = 2442 HMAC_SHA512_AES_CBC_ENC_TEST_VEC 2443 } 2444 } 2445 } 2446 }, { 2447 .alg = "authenc(hmac(sha512),cbc(des))", 2448 .test = alg_test_aead, 2449 .suite = { 2450 .aead = { 2451 .enc = { 2452 .vecs = 2453 hmac_sha512_des_cbc_enc_tv_temp, 2454 .count = 2455 HMAC_SHA512_DES_CBC_ENC_TEST_VEC 2456 } 2457 } 2458 } 2459 }, { 2460 .alg = "authenc(hmac(sha512),cbc(des3_ede))", 2461 .test = alg_test_aead, 2462 .fips_allowed = 1, 2463 .suite = { 2464 .aead = { 2465 .enc = { 2466 .vecs = 2467 hmac_sha512_des3_ede_cbc_enc_tv_temp, 2468 .count = 2469 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC 2470 } 2471 } 2472 } 2473 }, { 2474 .alg = "authenc(hmac(sha512),ctr(aes))", 2475 .test = alg_test_null, 2476 .fips_allowed = 1, 2477 }, { 2478 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))", 2479 .test = alg_test_null, 2480 .fips_allowed = 1, 2481 }, { 2482 .alg = "cbc(aes)", 2483 .test = alg_test_skcipher, 2484 .fips_allowed = 1, 2485 .suite = { 2486 .cipher = { 2487 .enc = { 2488 .vecs = aes_cbc_enc_tv_template, 2489 .count = AES_CBC_ENC_TEST_VECTORS 2490 }, 2491 .dec = { 2492 .vecs = aes_cbc_dec_tv_template, 2493 .count = AES_CBC_DEC_TEST_VECTORS 2494 } 2495 } 2496 } 2497 }, { 2498 .alg = "cbc(anubis)", 2499 .test = alg_test_skcipher, 2500 .suite = { 2501 .cipher = { 2502 .enc = { 2503 .vecs = anubis_cbc_enc_tv_template, 2504 .count = ANUBIS_CBC_ENC_TEST_VECTORS 2505 }, 2506 .dec = { 2507 .vecs = anubis_cbc_dec_tv_template, 2508 .count = ANUBIS_CBC_DEC_TEST_VECTORS 2509 } 2510 } 2511 } 2512 }, { 2513 .alg = "cbc(blowfish)", 2514 .test = alg_test_skcipher, 2515 .suite = { 2516 .cipher = { 2517 .enc = { 2518 .vecs = bf_cbc_enc_tv_template, 2519 .count = BF_CBC_ENC_TEST_VECTORS 2520 }, 2521 .dec = { 2522 .vecs = bf_cbc_dec_tv_template, 2523 .count = BF_CBC_DEC_TEST_VECTORS 2524 } 2525 } 2526 } 2527 }, { 2528 .alg = "cbc(camellia)", 2529 .test = alg_test_skcipher, 2530 .suite = { 2531 .cipher = { 2532 .enc = { 2533 .vecs = camellia_cbc_enc_tv_template, 2534 .count = CAMELLIA_CBC_ENC_TEST_VECTORS 2535 }, 2536 .dec = { 2537 .vecs = camellia_cbc_dec_tv_template, 2538 .count = CAMELLIA_CBC_DEC_TEST_VECTORS 2539 } 2540 } 2541 } 2542 }, { 2543 .alg = "cbc(cast5)", 2544 .test = alg_test_skcipher, 2545 .suite = { 2546 .cipher = { 2547 .enc = { 2548 .vecs = cast5_cbc_enc_tv_template, 2549 .count = CAST5_CBC_ENC_TEST_VECTORS 2550 }, 2551 .dec = { 2552 .vecs = cast5_cbc_dec_tv_template, 2553 .count = CAST5_CBC_DEC_TEST_VECTORS 2554 } 2555 } 2556 } 2557 }, { 2558 .alg = "cbc(cast6)", 2559 .test = alg_test_skcipher, 2560 .suite = { 2561 .cipher = { 2562 .enc = { 2563 .vecs = cast6_cbc_enc_tv_template, 2564 .count = CAST6_CBC_ENC_TEST_VECTORS 2565 }, 2566 .dec = { 2567 .vecs = cast6_cbc_dec_tv_template, 2568 .count = CAST6_CBC_DEC_TEST_VECTORS 2569 } 2570 } 2571 } 2572 }, { 2573 .alg = "cbc(des)", 2574 .test = alg_test_skcipher, 2575 .suite = { 2576 .cipher = { 2577 .enc = { 2578 .vecs = des_cbc_enc_tv_template, 2579 .count = DES_CBC_ENC_TEST_VECTORS 2580 }, 2581 .dec = { 2582 .vecs = des_cbc_dec_tv_template, 2583 .count = DES_CBC_DEC_TEST_VECTORS 2584 } 2585 } 2586 } 2587 }, { 2588 .alg = "cbc(des3_ede)", 2589 .test = alg_test_skcipher, 2590 .fips_allowed = 1, 2591 .suite = { 2592 .cipher = { 2593 .enc = { 2594 .vecs = des3_ede_cbc_enc_tv_template, 2595 .count = DES3_EDE_CBC_ENC_TEST_VECTORS 2596 }, 2597 .dec = { 2598 .vecs = des3_ede_cbc_dec_tv_template, 2599 .count = DES3_EDE_CBC_DEC_TEST_VECTORS 2600 } 2601 } 2602 } 2603 }, { 2604 .alg = "cbc(serpent)", 2605 .test = alg_test_skcipher, 2606 .suite = { 2607 .cipher = { 2608 .enc = { 2609 .vecs = serpent_cbc_enc_tv_template, 2610 .count = SERPENT_CBC_ENC_TEST_VECTORS 2611 }, 2612 .dec = { 2613 .vecs = serpent_cbc_dec_tv_template, 2614 .count = SERPENT_CBC_DEC_TEST_VECTORS 2615 } 2616 } 2617 } 2618 }, { 2619 .alg = "cbc(twofish)", 2620 .test = alg_test_skcipher, 2621 .suite = { 2622 .cipher = { 2623 .enc = { 2624 .vecs = tf_cbc_enc_tv_template, 2625 .count = TF_CBC_ENC_TEST_VECTORS 2626 }, 2627 .dec = { 2628 .vecs = tf_cbc_dec_tv_template, 2629 .count = TF_CBC_DEC_TEST_VECTORS 2630 } 2631 } 2632 } 2633 }, { 2634 .alg = "ccm(aes)", 2635 .test = alg_test_aead, 2636 .fips_allowed = 1, 2637 .suite = { 2638 .aead = { 2639 .enc = { 2640 .vecs = aes_ccm_enc_tv_template, 2641 .count = AES_CCM_ENC_TEST_VECTORS 2642 }, 2643 .dec = { 2644 .vecs = aes_ccm_dec_tv_template, 2645 .count = AES_CCM_DEC_TEST_VECTORS 2646 } 2647 } 2648 } 2649 }, { 2650 .alg = "chacha20", 2651 .test = alg_test_skcipher, 2652 .suite = { 2653 .cipher = { 2654 .enc = { 2655 .vecs = chacha20_enc_tv_template, 2656 .count = CHACHA20_ENC_TEST_VECTORS 2657 }, 2658 .dec = { 2659 .vecs = chacha20_enc_tv_template, 2660 .count = CHACHA20_ENC_TEST_VECTORS 2661 }, 2662 } 2663 } 2664 }, { 2665 .alg = "cmac(aes)", 2666 .fips_allowed = 1, 2667 .test = alg_test_hash, 2668 .suite = { 2669 .hash = { 2670 .vecs = aes_cmac128_tv_template, 2671 .count = CMAC_AES_TEST_VECTORS 2672 } 2673 } 2674 }, { 2675 .alg = "cmac(des3_ede)", 2676 .fips_allowed = 1, 2677 .test = alg_test_hash, 2678 .suite = { 2679 .hash = { 2680 .vecs = des3_ede_cmac64_tv_template, 2681 .count = CMAC_DES3_EDE_TEST_VECTORS 2682 } 2683 } 2684 }, { 2685 .alg = "compress_null", 2686 .test = alg_test_null, 2687 }, { 2688 .alg = "crc32", 2689 .test = alg_test_hash, 2690 .suite = { 2691 .hash = { 2692 .vecs = crc32_tv_template, 2693 .count = CRC32_TEST_VECTORS 2694 } 2695 } 2696 }, { 2697 .alg = "crc32c", 2698 .test = alg_test_crc32c, 2699 .fips_allowed = 1, 2700 .suite = { 2701 .hash = { 2702 .vecs = crc32c_tv_template, 2703 .count = CRC32C_TEST_VECTORS 2704 } 2705 } 2706 }, { 2707 .alg = "crct10dif", 2708 .test = alg_test_hash, 2709 .fips_allowed = 1, 2710 .suite = { 2711 .hash = { 2712 .vecs = crct10dif_tv_template, 2713 .count = CRCT10DIF_TEST_VECTORS 2714 } 2715 } 2716 }, { 2717 .alg = "ctr(aes)", 2718 .test = alg_test_skcipher, 2719 .fips_allowed = 1, 2720 .suite = { 2721 .cipher = { 2722 .enc = { 2723 .vecs = aes_ctr_enc_tv_template, 2724 .count = AES_CTR_ENC_TEST_VECTORS 2725 }, 2726 .dec = { 2727 .vecs = aes_ctr_dec_tv_template, 2728 .count = AES_CTR_DEC_TEST_VECTORS 2729 } 2730 } 2731 } 2732 }, { 2733 .alg = "ctr(blowfish)", 2734 .test = alg_test_skcipher, 2735 .suite = { 2736 .cipher = { 2737 .enc = { 2738 .vecs = bf_ctr_enc_tv_template, 2739 .count = BF_CTR_ENC_TEST_VECTORS 2740 }, 2741 .dec = { 2742 .vecs = bf_ctr_dec_tv_template, 2743 .count = BF_CTR_DEC_TEST_VECTORS 2744 } 2745 } 2746 } 2747 }, { 2748 .alg = "ctr(camellia)", 2749 .test = alg_test_skcipher, 2750 .suite = { 2751 .cipher = { 2752 .enc = { 2753 .vecs = camellia_ctr_enc_tv_template, 2754 .count = CAMELLIA_CTR_ENC_TEST_VECTORS 2755 }, 2756 .dec = { 2757 .vecs = camellia_ctr_dec_tv_template, 2758 .count = CAMELLIA_CTR_DEC_TEST_VECTORS 2759 } 2760 } 2761 } 2762 }, { 2763 .alg = "ctr(cast5)", 2764 .test = alg_test_skcipher, 2765 .suite = { 2766 .cipher = { 2767 .enc = { 2768 .vecs = cast5_ctr_enc_tv_template, 2769 .count = CAST5_CTR_ENC_TEST_VECTORS 2770 }, 2771 .dec = { 2772 .vecs = cast5_ctr_dec_tv_template, 2773 .count = CAST5_CTR_DEC_TEST_VECTORS 2774 } 2775 } 2776 } 2777 }, { 2778 .alg = "ctr(cast6)", 2779 .test = alg_test_skcipher, 2780 .suite = { 2781 .cipher = { 2782 .enc = { 2783 .vecs = cast6_ctr_enc_tv_template, 2784 .count = CAST6_CTR_ENC_TEST_VECTORS 2785 }, 2786 .dec = { 2787 .vecs = cast6_ctr_dec_tv_template, 2788 .count = CAST6_CTR_DEC_TEST_VECTORS 2789 } 2790 } 2791 } 2792 }, { 2793 .alg = "ctr(des)", 2794 .test = alg_test_skcipher, 2795 .suite = { 2796 .cipher = { 2797 .enc = { 2798 .vecs = des_ctr_enc_tv_template, 2799 .count = DES_CTR_ENC_TEST_VECTORS 2800 }, 2801 .dec = { 2802 .vecs = des_ctr_dec_tv_template, 2803 .count = DES_CTR_DEC_TEST_VECTORS 2804 } 2805 } 2806 } 2807 }, { 2808 .alg = "ctr(des3_ede)", 2809 .test = alg_test_skcipher, 2810 .suite = { 2811 .cipher = { 2812 .enc = { 2813 .vecs = des3_ede_ctr_enc_tv_template, 2814 .count = DES3_EDE_CTR_ENC_TEST_VECTORS 2815 }, 2816 .dec = { 2817 .vecs = des3_ede_ctr_dec_tv_template, 2818 .count = DES3_EDE_CTR_DEC_TEST_VECTORS 2819 } 2820 } 2821 } 2822 }, { 2823 .alg = "ctr(serpent)", 2824 .test = alg_test_skcipher, 2825 .suite = { 2826 .cipher = { 2827 .enc = { 2828 .vecs = serpent_ctr_enc_tv_template, 2829 .count = SERPENT_CTR_ENC_TEST_VECTORS 2830 }, 2831 .dec = { 2832 .vecs = serpent_ctr_dec_tv_template, 2833 .count = SERPENT_CTR_DEC_TEST_VECTORS 2834 } 2835 } 2836 } 2837 }, { 2838 .alg = "ctr(twofish)", 2839 .test = alg_test_skcipher, 2840 .suite = { 2841 .cipher = { 2842 .enc = { 2843 .vecs = tf_ctr_enc_tv_template, 2844 .count = TF_CTR_ENC_TEST_VECTORS 2845 }, 2846 .dec = { 2847 .vecs = tf_ctr_dec_tv_template, 2848 .count = TF_CTR_DEC_TEST_VECTORS 2849 } 2850 } 2851 } 2852 }, { 2853 .alg = "cts(cbc(aes))", 2854 .test = alg_test_skcipher, 2855 .suite = { 2856 .cipher = { 2857 .enc = { 2858 .vecs = cts_mode_enc_tv_template, 2859 .count = CTS_MODE_ENC_TEST_VECTORS 2860 }, 2861 .dec = { 2862 .vecs = cts_mode_dec_tv_template, 2863 .count = CTS_MODE_DEC_TEST_VECTORS 2864 } 2865 } 2866 } 2867 }, { 2868 .alg = "deflate", 2869 .test = alg_test_comp, 2870 .fips_allowed = 1, 2871 .suite = { 2872 .comp = { 2873 .comp = { 2874 .vecs = deflate_comp_tv_template, 2875 .count = DEFLATE_COMP_TEST_VECTORS 2876 }, 2877 .decomp = { 2878 .vecs = deflate_decomp_tv_template, 2879 .count = DEFLATE_DECOMP_TEST_VECTORS 2880 } 2881 } 2882 } 2883 }, { 2884 .alg = "dh", 2885 .test = alg_test_kpp, 2886 .fips_allowed = 1, 2887 .suite = { 2888 .kpp = { 2889 .vecs = dh_tv_template, 2890 .count = DH_TEST_VECTORS 2891 } 2892 } 2893 }, { 2894 .alg = "digest_null", 2895 .test = alg_test_null, 2896 }, { 2897 .alg = "drbg_nopr_ctr_aes128", 2898 .test = alg_test_drbg, 2899 .fips_allowed = 1, 2900 .suite = { 2901 .drbg = { 2902 .vecs = drbg_nopr_ctr_aes128_tv_template, 2903 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template) 2904 } 2905 } 2906 }, { 2907 .alg = "drbg_nopr_ctr_aes192", 2908 .test = alg_test_drbg, 2909 .fips_allowed = 1, 2910 .suite = { 2911 .drbg = { 2912 .vecs = drbg_nopr_ctr_aes192_tv_template, 2913 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template) 2914 } 2915 } 2916 }, { 2917 .alg = "drbg_nopr_ctr_aes256", 2918 .test = alg_test_drbg, 2919 .fips_allowed = 1, 2920 .suite = { 2921 .drbg = { 2922 .vecs = drbg_nopr_ctr_aes256_tv_template, 2923 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template) 2924 } 2925 } 2926 }, { 2927 /* 2928 * There is no need to specifically test the DRBG with every 2929 * backend cipher -- covered by drbg_nopr_hmac_sha256 test 2930 */ 2931 .alg = "drbg_nopr_hmac_sha1", 2932 .fips_allowed = 1, 2933 .test = alg_test_null, 2934 }, { 2935 .alg = "drbg_nopr_hmac_sha256", 2936 .test = alg_test_drbg, 2937 .fips_allowed = 1, 2938 .suite = { 2939 .drbg = { 2940 .vecs = drbg_nopr_hmac_sha256_tv_template, 2941 .count = 2942 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template) 2943 } 2944 } 2945 }, { 2946 /* covered by drbg_nopr_hmac_sha256 test */ 2947 .alg = "drbg_nopr_hmac_sha384", 2948 .fips_allowed = 1, 2949 .test = alg_test_null, 2950 }, { 2951 .alg = "drbg_nopr_hmac_sha512", 2952 .test = alg_test_null, 2953 .fips_allowed = 1, 2954 }, { 2955 .alg = "drbg_nopr_sha1", 2956 .fips_allowed = 1, 2957 .test = alg_test_null, 2958 }, { 2959 .alg = "drbg_nopr_sha256", 2960 .test = alg_test_drbg, 2961 .fips_allowed = 1, 2962 .suite = { 2963 .drbg = { 2964 .vecs = drbg_nopr_sha256_tv_template, 2965 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template) 2966 } 2967 } 2968 }, { 2969 /* covered by drbg_nopr_sha256 test */ 2970 .alg = "drbg_nopr_sha384", 2971 .fips_allowed = 1, 2972 .test = alg_test_null, 2973 }, { 2974 .alg = "drbg_nopr_sha512", 2975 .fips_allowed = 1, 2976 .test = alg_test_null, 2977 }, { 2978 .alg = "drbg_pr_ctr_aes128", 2979 .test = alg_test_drbg, 2980 .fips_allowed = 1, 2981 .suite = { 2982 .drbg = { 2983 .vecs = drbg_pr_ctr_aes128_tv_template, 2984 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template) 2985 } 2986 } 2987 }, { 2988 /* covered by drbg_pr_ctr_aes128 test */ 2989 .alg = "drbg_pr_ctr_aes192", 2990 .fips_allowed = 1, 2991 .test = alg_test_null, 2992 }, { 2993 .alg = "drbg_pr_ctr_aes256", 2994 .fips_allowed = 1, 2995 .test = alg_test_null, 2996 }, { 2997 .alg = "drbg_pr_hmac_sha1", 2998 .fips_allowed = 1, 2999 .test = alg_test_null, 3000 }, { 3001 .alg = "drbg_pr_hmac_sha256", 3002 .test = alg_test_drbg, 3003 .fips_allowed = 1, 3004 .suite = { 3005 .drbg = { 3006 .vecs = drbg_pr_hmac_sha256_tv_template, 3007 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template) 3008 } 3009 } 3010 }, { 3011 /* covered by drbg_pr_hmac_sha256 test */ 3012 .alg = "drbg_pr_hmac_sha384", 3013 .fips_allowed = 1, 3014 .test = alg_test_null, 3015 }, { 3016 .alg = "drbg_pr_hmac_sha512", 3017 .test = alg_test_null, 3018 .fips_allowed = 1, 3019 }, { 3020 .alg = "drbg_pr_sha1", 3021 .fips_allowed = 1, 3022 .test = alg_test_null, 3023 }, { 3024 .alg = "drbg_pr_sha256", 3025 .test = alg_test_drbg, 3026 .fips_allowed = 1, 3027 .suite = { 3028 .drbg = { 3029 .vecs = drbg_pr_sha256_tv_template, 3030 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template) 3031 } 3032 } 3033 }, { 3034 /* covered by drbg_pr_sha256 test */ 3035 .alg = "drbg_pr_sha384", 3036 .fips_allowed = 1, 3037 .test = alg_test_null, 3038 }, { 3039 .alg = "drbg_pr_sha512", 3040 .fips_allowed = 1, 3041 .test = alg_test_null, 3042 }, { 3043 .alg = "ecb(aes)", 3044 .test = alg_test_skcipher, 3045 .fips_allowed = 1, 3046 .suite = { 3047 .cipher = { 3048 .enc = { 3049 .vecs = aes_enc_tv_template, 3050 .count = AES_ENC_TEST_VECTORS 3051 }, 3052 .dec = { 3053 .vecs = aes_dec_tv_template, 3054 .count = AES_DEC_TEST_VECTORS 3055 } 3056 } 3057 } 3058 }, { 3059 .alg = "ecb(anubis)", 3060 .test = alg_test_skcipher, 3061 .suite = { 3062 .cipher = { 3063 .enc = { 3064 .vecs = anubis_enc_tv_template, 3065 .count = ANUBIS_ENC_TEST_VECTORS 3066 }, 3067 .dec = { 3068 .vecs = anubis_dec_tv_template, 3069 .count = ANUBIS_DEC_TEST_VECTORS 3070 } 3071 } 3072 } 3073 }, { 3074 .alg = "ecb(arc4)", 3075 .test = alg_test_skcipher, 3076 .suite = { 3077 .cipher = { 3078 .enc = { 3079 .vecs = arc4_enc_tv_template, 3080 .count = ARC4_ENC_TEST_VECTORS 3081 }, 3082 .dec = { 3083 .vecs = arc4_dec_tv_template, 3084 .count = ARC4_DEC_TEST_VECTORS 3085 } 3086 } 3087 } 3088 }, { 3089 .alg = "ecb(blowfish)", 3090 .test = alg_test_skcipher, 3091 .suite = { 3092 .cipher = { 3093 .enc = { 3094 .vecs = bf_enc_tv_template, 3095 .count = BF_ENC_TEST_VECTORS 3096 }, 3097 .dec = { 3098 .vecs = bf_dec_tv_template, 3099 .count = BF_DEC_TEST_VECTORS 3100 } 3101 } 3102 } 3103 }, { 3104 .alg = "ecb(camellia)", 3105 .test = alg_test_skcipher, 3106 .suite = { 3107 .cipher = { 3108 .enc = { 3109 .vecs = camellia_enc_tv_template, 3110 .count = CAMELLIA_ENC_TEST_VECTORS 3111 }, 3112 .dec = { 3113 .vecs = camellia_dec_tv_template, 3114 .count = CAMELLIA_DEC_TEST_VECTORS 3115 } 3116 } 3117 } 3118 }, { 3119 .alg = "ecb(cast5)", 3120 .test = alg_test_skcipher, 3121 .suite = { 3122 .cipher = { 3123 .enc = { 3124 .vecs = cast5_enc_tv_template, 3125 .count = CAST5_ENC_TEST_VECTORS 3126 }, 3127 .dec = { 3128 .vecs = cast5_dec_tv_template, 3129 .count = CAST5_DEC_TEST_VECTORS 3130 } 3131 } 3132 } 3133 }, { 3134 .alg = "ecb(cast6)", 3135 .test = alg_test_skcipher, 3136 .suite = { 3137 .cipher = { 3138 .enc = { 3139 .vecs = cast6_enc_tv_template, 3140 .count = CAST6_ENC_TEST_VECTORS 3141 }, 3142 .dec = { 3143 .vecs = cast6_dec_tv_template, 3144 .count = CAST6_DEC_TEST_VECTORS 3145 } 3146 } 3147 } 3148 }, { 3149 .alg = "ecb(cipher_null)", 3150 .test = alg_test_null, 3151 }, { 3152 .alg = "ecb(des)", 3153 .test = alg_test_skcipher, 3154 .suite = { 3155 .cipher = { 3156 .enc = { 3157 .vecs = des_enc_tv_template, 3158 .count = DES_ENC_TEST_VECTORS 3159 }, 3160 .dec = { 3161 .vecs = des_dec_tv_template, 3162 .count = DES_DEC_TEST_VECTORS 3163 } 3164 } 3165 } 3166 }, { 3167 .alg = "ecb(des3_ede)", 3168 .test = alg_test_skcipher, 3169 .fips_allowed = 1, 3170 .suite = { 3171 .cipher = { 3172 .enc = { 3173 .vecs = des3_ede_enc_tv_template, 3174 .count = DES3_EDE_ENC_TEST_VECTORS 3175 }, 3176 .dec = { 3177 .vecs = des3_ede_dec_tv_template, 3178 .count = DES3_EDE_DEC_TEST_VECTORS 3179 } 3180 } 3181 } 3182 }, { 3183 .alg = "ecb(fcrypt)", 3184 .test = alg_test_skcipher, 3185 .suite = { 3186 .cipher = { 3187 .enc = { 3188 .vecs = fcrypt_pcbc_enc_tv_template, 3189 .count = 1 3190 }, 3191 .dec = { 3192 .vecs = fcrypt_pcbc_dec_tv_template, 3193 .count = 1 3194 } 3195 } 3196 } 3197 }, { 3198 .alg = "ecb(khazad)", 3199 .test = alg_test_skcipher, 3200 .suite = { 3201 .cipher = { 3202 .enc = { 3203 .vecs = khazad_enc_tv_template, 3204 .count = KHAZAD_ENC_TEST_VECTORS 3205 }, 3206 .dec = { 3207 .vecs = khazad_dec_tv_template, 3208 .count = KHAZAD_DEC_TEST_VECTORS 3209 } 3210 } 3211 } 3212 }, { 3213 .alg = "ecb(seed)", 3214 .test = alg_test_skcipher, 3215 .suite = { 3216 .cipher = { 3217 .enc = { 3218 .vecs = seed_enc_tv_template, 3219 .count = SEED_ENC_TEST_VECTORS 3220 }, 3221 .dec = { 3222 .vecs = seed_dec_tv_template, 3223 .count = SEED_DEC_TEST_VECTORS 3224 } 3225 } 3226 } 3227 }, { 3228 .alg = "ecb(serpent)", 3229 .test = alg_test_skcipher, 3230 .suite = { 3231 .cipher = { 3232 .enc = { 3233 .vecs = serpent_enc_tv_template, 3234 .count = SERPENT_ENC_TEST_VECTORS 3235 }, 3236 .dec = { 3237 .vecs = serpent_dec_tv_template, 3238 .count = SERPENT_DEC_TEST_VECTORS 3239 } 3240 } 3241 } 3242 }, { 3243 .alg = "ecb(tea)", 3244 .test = alg_test_skcipher, 3245 .suite = { 3246 .cipher = { 3247 .enc = { 3248 .vecs = tea_enc_tv_template, 3249 .count = TEA_ENC_TEST_VECTORS 3250 }, 3251 .dec = { 3252 .vecs = tea_dec_tv_template, 3253 .count = TEA_DEC_TEST_VECTORS 3254 } 3255 } 3256 } 3257 }, { 3258 .alg = "ecb(tnepres)", 3259 .test = alg_test_skcipher, 3260 .suite = { 3261 .cipher = { 3262 .enc = { 3263 .vecs = tnepres_enc_tv_template, 3264 .count = TNEPRES_ENC_TEST_VECTORS 3265 }, 3266 .dec = { 3267 .vecs = tnepres_dec_tv_template, 3268 .count = TNEPRES_DEC_TEST_VECTORS 3269 } 3270 } 3271 } 3272 }, { 3273 .alg = "ecb(twofish)", 3274 .test = alg_test_skcipher, 3275 .suite = { 3276 .cipher = { 3277 .enc = { 3278 .vecs = tf_enc_tv_template, 3279 .count = TF_ENC_TEST_VECTORS 3280 }, 3281 .dec = { 3282 .vecs = tf_dec_tv_template, 3283 .count = TF_DEC_TEST_VECTORS 3284 } 3285 } 3286 } 3287 }, { 3288 .alg = "ecb(xeta)", 3289 .test = alg_test_skcipher, 3290 .suite = { 3291 .cipher = { 3292 .enc = { 3293 .vecs = xeta_enc_tv_template, 3294 .count = XETA_ENC_TEST_VECTORS 3295 }, 3296 .dec = { 3297 .vecs = xeta_dec_tv_template, 3298 .count = XETA_DEC_TEST_VECTORS 3299 } 3300 } 3301 } 3302 }, { 3303 .alg = "ecb(xtea)", 3304 .test = alg_test_skcipher, 3305 .suite = { 3306 .cipher = { 3307 .enc = { 3308 .vecs = xtea_enc_tv_template, 3309 .count = XTEA_ENC_TEST_VECTORS 3310 }, 3311 .dec = { 3312 .vecs = xtea_dec_tv_template, 3313 .count = XTEA_DEC_TEST_VECTORS 3314 } 3315 } 3316 } 3317 }, { 3318 .alg = "ecdh", 3319 .test = alg_test_kpp, 3320 .fips_allowed = 1, 3321 .suite = { 3322 .kpp = { 3323 .vecs = ecdh_tv_template, 3324 .count = ECDH_TEST_VECTORS 3325 } 3326 } 3327 }, { 3328 .alg = "gcm(aes)", 3329 .test = alg_test_aead, 3330 .fips_allowed = 1, 3331 .suite = { 3332 .aead = { 3333 .enc = { 3334 .vecs = aes_gcm_enc_tv_template, 3335 .count = AES_GCM_ENC_TEST_VECTORS 3336 }, 3337 .dec = { 3338 .vecs = aes_gcm_dec_tv_template, 3339 .count = AES_GCM_DEC_TEST_VECTORS 3340 } 3341 } 3342 } 3343 }, { 3344 .alg = "ghash", 3345 .test = alg_test_hash, 3346 .fips_allowed = 1, 3347 .suite = { 3348 .hash = { 3349 .vecs = ghash_tv_template, 3350 .count = GHASH_TEST_VECTORS 3351 } 3352 } 3353 }, { 3354 .alg = "hmac(crc32)", 3355 .test = alg_test_hash, 3356 .suite = { 3357 .hash = { 3358 .vecs = bfin_crc_tv_template, 3359 .count = BFIN_CRC_TEST_VECTORS 3360 } 3361 } 3362 }, { 3363 .alg = "hmac(md5)", 3364 .test = alg_test_hash, 3365 .suite = { 3366 .hash = { 3367 .vecs = hmac_md5_tv_template, 3368 .count = HMAC_MD5_TEST_VECTORS 3369 } 3370 } 3371 }, { 3372 .alg = "hmac(rmd128)", 3373 .test = alg_test_hash, 3374 .suite = { 3375 .hash = { 3376 .vecs = hmac_rmd128_tv_template, 3377 .count = HMAC_RMD128_TEST_VECTORS 3378 } 3379 } 3380 }, { 3381 .alg = "hmac(rmd160)", 3382 .test = alg_test_hash, 3383 .suite = { 3384 .hash = { 3385 .vecs = hmac_rmd160_tv_template, 3386 .count = HMAC_RMD160_TEST_VECTORS 3387 } 3388 } 3389 }, { 3390 .alg = "hmac(sha1)", 3391 .test = alg_test_hash, 3392 .fips_allowed = 1, 3393 .suite = { 3394 .hash = { 3395 .vecs = hmac_sha1_tv_template, 3396 .count = HMAC_SHA1_TEST_VECTORS 3397 } 3398 } 3399 }, { 3400 .alg = "hmac(sha224)", 3401 .test = alg_test_hash, 3402 .fips_allowed = 1, 3403 .suite = { 3404 .hash = { 3405 .vecs = hmac_sha224_tv_template, 3406 .count = HMAC_SHA224_TEST_VECTORS 3407 } 3408 } 3409 }, { 3410 .alg = "hmac(sha256)", 3411 .test = alg_test_hash, 3412 .fips_allowed = 1, 3413 .suite = { 3414 .hash = { 3415 .vecs = hmac_sha256_tv_template, 3416 .count = HMAC_SHA256_TEST_VECTORS 3417 } 3418 } 3419 }, { 3420 .alg = "hmac(sha3-224)", 3421 .test = alg_test_hash, 3422 .fips_allowed = 1, 3423 .suite = { 3424 .hash = { 3425 .vecs = hmac_sha3_224_tv_template, 3426 .count = HMAC_SHA3_224_TEST_VECTORS 3427 } 3428 } 3429 }, { 3430 .alg = "hmac(sha3-256)", 3431 .test = alg_test_hash, 3432 .fips_allowed = 1, 3433 .suite = { 3434 .hash = { 3435 .vecs = hmac_sha3_256_tv_template, 3436 .count = HMAC_SHA3_256_TEST_VECTORS 3437 } 3438 } 3439 }, { 3440 .alg = "hmac(sha3-384)", 3441 .test = alg_test_hash, 3442 .fips_allowed = 1, 3443 .suite = { 3444 .hash = { 3445 .vecs = hmac_sha3_384_tv_template, 3446 .count = HMAC_SHA3_384_TEST_VECTORS 3447 } 3448 } 3449 }, { 3450 .alg = "hmac(sha3-512)", 3451 .test = alg_test_hash, 3452 .fips_allowed = 1, 3453 .suite = { 3454 .hash = { 3455 .vecs = hmac_sha3_512_tv_template, 3456 .count = HMAC_SHA3_512_TEST_VECTORS 3457 } 3458 } 3459 }, { 3460 .alg = "hmac(sha384)", 3461 .test = alg_test_hash, 3462 .fips_allowed = 1, 3463 .suite = { 3464 .hash = { 3465 .vecs = hmac_sha384_tv_template, 3466 .count = HMAC_SHA384_TEST_VECTORS 3467 } 3468 } 3469 }, { 3470 .alg = "hmac(sha512)", 3471 .test = alg_test_hash, 3472 .fips_allowed = 1, 3473 .suite = { 3474 .hash = { 3475 .vecs = hmac_sha512_tv_template, 3476 .count = HMAC_SHA512_TEST_VECTORS 3477 } 3478 } 3479 }, { 3480 .alg = "jitterentropy_rng", 3481 .fips_allowed = 1, 3482 .test = alg_test_null, 3483 }, { 3484 .alg = "kw(aes)", 3485 .test = alg_test_skcipher, 3486 .fips_allowed = 1, 3487 .suite = { 3488 .cipher = { 3489 .enc = { 3490 .vecs = aes_kw_enc_tv_template, 3491 .count = ARRAY_SIZE(aes_kw_enc_tv_template) 3492 }, 3493 .dec = { 3494 .vecs = aes_kw_dec_tv_template, 3495 .count = ARRAY_SIZE(aes_kw_dec_tv_template) 3496 } 3497 } 3498 } 3499 }, { 3500 .alg = "lrw(aes)", 3501 .test = alg_test_skcipher, 3502 .suite = { 3503 .cipher = { 3504 .enc = { 3505 .vecs = aes_lrw_enc_tv_template, 3506 .count = AES_LRW_ENC_TEST_VECTORS 3507 }, 3508 .dec = { 3509 .vecs = aes_lrw_dec_tv_template, 3510 .count = AES_LRW_DEC_TEST_VECTORS 3511 } 3512 } 3513 } 3514 }, { 3515 .alg = "lrw(camellia)", 3516 .test = alg_test_skcipher, 3517 .suite = { 3518 .cipher = { 3519 .enc = { 3520 .vecs = camellia_lrw_enc_tv_template, 3521 .count = CAMELLIA_LRW_ENC_TEST_VECTORS 3522 }, 3523 .dec = { 3524 .vecs = camellia_lrw_dec_tv_template, 3525 .count = CAMELLIA_LRW_DEC_TEST_VECTORS 3526 } 3527 } 3528 } 3529 }, { 3530 .alg = "lrw(cast6)", 3531 .test = alg_test_skcipher, 3532 .suite = { 3533 .cipher = { 3534 .enc = { 3535 .vecs = cast6_lrw_enc_tv_template, 3536 .count = CAST6_LRW_ENC_TEST_VECTORS 3537 }, 3538 .dec = { 3539 .vecs = cast6_lrw_dec_tv_template, 3540 .count = CAST6_LRW_DEC_TEST_VECTORS 3541 } 3542 } 3543 } 3544 }, { 3545 .alg = "lrw(serpent)", 3546 .test = alg_test_skcipher, 3547 .suite = { 3548 .cipher = { 3549 .enc = { 3550 .vecs = serpent_lrw_enc_tv_template, 3551 .count = SERPENT_LRW_ENC_TEST_VECTORS 3552 }, 3553 .dec = { 3554 .vecs = serpent_lrw_dec_tv_template, 3555 .count = SERPENT_LRW_DEC_TEST_VECTORS 3556 } 3557 } 3558 } 3559 }, { 3560 .alg = "lrw(twofish)", 3561 .test = alg_test_skcipher, 3562 .suite = { 3563 .cipher = { 3564 .enc = { 3565 .vecs = tf_lrw_enc_tv_template, 3566 .count = TF_LRW_ENC_TEST_VECTORS 3567 }, 3568 .dec = { 3569 .vecs = tf_lrw_dec_tv_template, 3570 .count = TF_LRW_DEC_TEST_VECTORS 3571 } 3572 } 3573 } 3574 }, { 3575 .alg = "lz4", 3576 .test = alg_test_comp, 3577 .fips_allowed = 1, 3578 .suite = { 3579 .comp = { 3580 .comp = { 3581 .vecs = lz4_comp_tv_template, 3582 .count = LZ4_COMP_TEST_VECTORS 3583 }, 3584 .decomp = { 3585 .vecs = lz4_decomp_tv_template, 3586 .count = LZ4_DECOMP_TEST_VECTORS 3587 } 3588 } 3589 } 3590 }, { 3591 .alg = "lz4hc", 3592 .test = alg_test_comp, 3593 .fips_allowed = 1, 3594 .suite = { 3595 .comp = { 3596 .comp = { 3597 .vecs = lz4hc_comp_tv_template, 3598 .count = LZ4HC_COMP_TEST_VECTORS 3599 }, 3600 .decomp = { 3601 .vecs = lz4hc_decomp_tv_template, 3602 .count = LZ4HC_DECOMP_TEST_VECTORS 3603 } 3604 } 3605 } 3606 }, { 3607 .alg = "lzo", 3608 .test = alg_test_comp, 3609 .fips_allowed = 1, 3610 .suite = { 3611 .comp = { 3612 .comp = { 3613 .vecs = lzo_comp_tv_template, 3614 .count = LZO_COMP_TEST_VECTORS 3615 }, 3616 .decomp = { 3617 .vecs = lzo_decomp_tv_template, 3618 .count = LZO_DECOMP_TEST_VECTORS 3619 } 3620 } 3621 } 3622 }, { 3623 .alg = "md4", 3624 .test = alg_test_hash, 3625 .suite = { 3626 .hash = { 3627 .vecs = md4_tv_template, 3628 .count = MD4_TEST_VECTORS 3629 } 3630 } 3631 }, { 3632 .alg = "md5", 3633 .test = alg_test_hash, 3634 .suite = { 3635 .hash = { 3636 .vecs = md5_tv_template, 3637 .count = MD5_TEST_VECTORS 3638 } 3639 } 3640 }, { 3641 .alg = "michael_mic", 3642 .test = alg_test_hash, 3643 .suite = { 3644 .hash = { 3645 .vecs = michael_mic_tv_template, 3646 .count = MICHAEL_MIC_TEST_VECTORS 3647 } 3648 } 3649 }, { 3650 .alg = "ofb(aes)", 3651 .test = alg_test_skcipher, 3652 .fips_allowed = 1, 3653 .suite = { 3654 .cipher = { 3655 .enc = { 3656 .vecs = aes_ofb_enc_tv_template, 3657 .count = AES_OFB_ENC_TEST_VECTORS 3658 }, 3659 .dec = { 3660 .vecs = aes_ofb_dec_tv_template, 3661 .count = AES_OFB_DEC_TEST_VECTORS 3662 } 3663 } 3664 } 3665 }, { 3666 .alg = "pcbc(fcrypt)", 3667 .test = alg_test_skcipher, 3668 .suite = { 3669 .cipher = { 3670 .enc = { 3671 .vecs = fcrypt_pcbc_enc_tv_template, 3672 .count = FCRYPT_ENC_TEST_VECTORS 3673 }, 3674 .dec = { 3675 .vecs = fcrypt_pcbc_dec_tv_template, 3676 .count = FCRYPT_DEC_TEST_VECTORS 3677 } 3678 } 3679 } 3680 }, { 3681 .alg = "poly1305", 3682 .test = alg_test_hash, 3683 .suite = { 3684 .hash = { 3685 .vecs = poly1305_tv_template, 3686 .count = POLY1305_TEST_VECTORS 3687 } 3688 } 3689 }, { 3690 .alg = "rfc3686(ctr(aes))", 3691 .test = alg_test_skcipher, 3692 .fips_allowed = 1, 3693 .suite = { 3694 .cipher = { 3695 .enc = { 3696 .vecs = aes_ctr_rfc3686_enc_tv_template, 3697 .count = AES_CTR_3686_ENC_TEST_VECTORS 3698 }, 3699 .dec = { 3700 .vecs = aes_ctr_rfc3686_dec_tv_template, 3701 .count = AES_CTR_3686_DEC_TEST_VECTORS 3702 } 3703 } 3704 } 3705 }, { 3706 .alg = "rfc4106(gcm(aes))", 3707 .test = alg_test_aead, 3708 .fips_allowed = 1, 3709 .suite = { 3710 .aead = { 3711 .enc = { 3712 .vecs = aes_gcm_rfc4106_enc_tv_template, 3713 .count = AES_GCM_4106_ENC_TEST_VECTORS 3714 }, 3715 .dec = { 3716 .vecs = aes_gcm_rfc4106_dec_tv_template, 3717 .count = AES_GCM_4106_DEC_TEST_VECTORS 3718 } 3719 } 3720 } 3721 }, { 3722 .alg = "rfc4309(ccm(aes))", 3723 .test = alg_test_aead, 3724 .fips_allowed = 1, 3725 .suite = { 3726 .aead = { 3727 .enc = { 3728 .vecs = aes_ccm_rfc4309_enc_tv_template, 3729 .count = AES_CCM_4309_ENC_TEST_VECTORS 3730 }, 3731 .dec = { 3732 .vecs = aes_ccm_rfc4309_dec_tv_template, 3733 .count = AES_CCM_4309_DEC_TEST_VECTORS 3734 } 3735 } 3736 } 3737 }, { 3738 .alg = "rfc4543(gcm(aes))", 3739 .test = alg_test_aead, 3740 .suite = { 3741 .aead = { 3742 .enc = { 3743 .vecs = aes_gcm_rfc4543_enc_tv_template, 3744 .count = AES_GCM_4543_ENC_TEST_VECTORS 3745 }, 3746 .dec = { 3747 .vecs = aes_gcm_rfc4543_dec_tv_template, 3748 .count = AES_GCM_4543_DEC_TEST_VECTORS 3749 }, 3750 } 3751 } 3752 }, { 3753 .alg = "rfc7539(chacha20,poly1305)", 3754 .test = alg_test_aead, 3755 .suite = { 3756 .aead = { 3757 .enc = { 3758 .vecs = rfc7539_enc_tv_template, 3759 .count = RFC7539_ENC_TEST_VECTORS 3760 }, 3761 .dec = { 3762 .vecs = rfc7539_dec_tv_template, 3763 .count = RFC7539_DEC_TEST_VECTORS 3764 }, 3765 } 3766 } 3767 }, { 3768 .alg = "rfc7539esp(chacha20,poly1305)", 3769 .test = alg_test_aead, 3770 .suite = { 3771 .aead = { 3772 .enc = { 3773 .vecs = rfc7539esp_enc_tv_template, 3774 .count = RFC7539ESP_ENC_TEST_VECTORS 3775 }, 3776 .dec = { 3777 .vecs = rfc7539esp_dec_tv_template, 3778 .count = RFC7539ESP_DEC_TEST_VECTORS 3779 }, 3780 } 3781 } 3782 }, { 3783 .alg = "rmd128", 3784 .test = alg_test_hash, 3785 .suite = { 3786 .hash = { 3787 .vecs = rmd128_tv_template, 3788 .count = RMD128_TEST_VECTORS 3789 } 3790 } 3791 }, { 3792 .alg = "rmd160", 3793 .test = alg_test_hash, 3794 .suite = { 3795 .hash = { 3796 .vecs = rmd160_tv_template, 3797 .count = RMD160_TEST_VECTORS 3798 } 3799 } 3800 }, { 3801 .alg = "rmd256", 3802 .test = alg_test_hash, 3803 .suite = { 3804 .hash = { 3805 .vecs = rmd256_tv_template, 3806 .count = RMD256_TEST_VECTORS 3807 } 3808 } 3809 }, { 3810 .alg = "rmd320", 3811 .test = alg_test_hash, 3812 .suite = { 3813 .hash = { 3814 .vecs = rmd320_tv_template, 3815 .count = RMD320_TEST_VECTORS 3816 } 3817 } 3818 }, { 3819 .alg = "rsa", 3820 .test = alg_test_akcipher, 3821 .fips_allowed = 1, 3822 .suite = { 3823 .akcipher = { 3824 .vecs = rsa_tv_template, 3825 .count = RSA_TEST_VECTORS 3826 } 3827 } 3828 }, { 3829 .alg = "salsa20", 3830 .test = alg_test_skcipher, 3831 .suite = { 3832 .cipher = { 3833 .enc = { 3834 .vecs = salsa20_stream_enc_tv_template, 3835 .count = SALSA20_STREAM_ENC_TEST_VECTORS 3836 } 3837 } 3838 } 3839 }, { 3840 .alg = "sha1", 3841 .test = alg_test_hash, 3842 .fips_allowed = 1, 3843 .suite = { 3844 .hash = { 3845 .vecs = sha1_tv_template, 3846 .count = SHA1_TEST_VECTORS 3847 } 3848 } 3849 }, { 3850 .alg = "sha224", 3851 .test = alg_test_hash, 3852 .fips_allowed = 1, 3853 .suite = { 3854 .hash = { 3855 .vecs = sha224_tv_template, 3856 .count = SHA224_TEST_VECTORS 3857 } 3858 } 3859 }, { 3860 .alg = "sha256", 3861 .test = alg_test_hash, 3862 .fips_allowed = 1, 3863 .suite = { 3864 .hash = { 3865 .vecs = sha256_tv_template, 3866 .count = SHA256_TEST_VECTORS 3867 } 3868 } 3869 }, { 3870 .alg = "sha3-224", 3871 .test = alg_test_hash, 3872 .fips_allowed = 1, 3873 .suite = { 3874 .hash = { 3875 .vecs = sha3_224_tv_template, 3876 .count = SHA3_224_TEST_VECTORS 3877 } 3878 } 3879 }, { 3880 .alg = "sha3-256", 3881 .test = alg_test_hash, 3882 .fips_allowed = 1, 3883 .suite = { 3884 .hash = { 3885 .vecs = sha3_256_tv_template, 3886 .count = SHA3_256_TEST_VECTORS 3887 } 3888 } 3889 }, { 3890 .alg = "sha3-384", 3891 .test = alg_test_hash, 3892 .fips_allowed = 1, 3893 .suite = { 3894 .hash = { 3895 .vecs = sha3_384_tv_template, 3896 .count = SHA3_384_TEST_VECTORS 3897 } 3898 } 3899 }, { 3900 .alg = "sha3-512", 3901 .test = alg_test_hash, 3902 .fips_allowed = 1, 3903 .suite = { 3904 .hash = { 3905 .vecs = sha3_512_tv_template, 3906 .count = SHA3_512_TEST_VECTORS 3907 } 3908 } 3909 }, { 3910 .alg = "sha384", 3911 .test = alg_test_hash, 3912 .fips_allowed = 1, 3913 .suite = { 3914 .hash = { 3915 .vecs = sha384_tv_template, 3916 .count = SHA384_TEST_VECTORS 3917 } 3918 } 3919 }, { 3920 .alg = "sha512", 3921 .test = alg_test_hash, 3922 .fips_allowed = 1, 3923 .suite = { 3924 .hash = { 3925 .vecs = sha512_tv_template, 3926 .count = SHA512_TEST_VECTORS 3927 } 3928 } 3929 }, { 3930 .alg = "tgr128", 3931 .test = alg_test_hash, 3932 .suite = { 3933 .hash = { 3934 .vecs = tgr128_tv_template, 3935 .count = TGR128_TEST_VECTORS 3936 } 3937 } 3938 }, { 3939 .alg = "tgr160", 3940 .test = alg_test_hash, 3941 .suite = { 3942 .hash = { 3943 .vecs = tgr160_tv_template, 3944 .count = TGR160_TEST_VECTORS 3945 } 3946 } 3947 }, { 3948 .alg = "tgr192", 3949 .test = alg_test_hash, 3950 .suite = { 3951 .hash = { 3952 .vecs = tgr192_tv_template, 3953 .count = TGR192_TEST_VECTORS 3954 } 3955 } 3956 }, { 3957 .alg = "vmac(aes)", 3958 .test = alg_test_hash, 3959 .suite = { 3960 .hash = { 3961 .vecs = aes_vmac128_tv_template, 3962 .count = VMAC_AES_TEST_VECTORS 3963 } 3964 } 3965 }, { 3966 .alg = "wp256", 3967 .test = alg_test_hash, 3968 .suite = { 3969 .hash = { 3970 .vecs = wp256_tv_template, 3971 .count = WP256_TEST_VECTORS 3972 } 3973 } 3974 }, { 3975 .alg = "wp384", 3976 .test = alg_test_hash, 3977 .suite = { 3978 .hash = { 3979 .vecs = wp384_tv_template, 3980 .count = WP384_TEST_VECTORS 3981 } 3982 } 3983 }, { 3984 .alg = "wp512", 3985 .test = alg_test_hash, 3986 .suite = { 3987 .hash = { 3988 .vecs = wp512_tv_template, 3989 .count = WP512_TEST_VECTORS 3990 } 3991 } 3992 }, { 3993 .alg = "xcbc(aes)", 3994 .test = alg_test_hash, 3995 .suite = { 3996 .hash = { 3997 .vecs = aes_xcbc128_tv_template, 3998 .count = XCBC_AES_TEST_VECTORS 3999 } 4000 } 4001 }, { 4002 .alg = "xts(aes)", 4003 .test = alg_test_skcipher, 4004 .fips_allowed = 1, 4005 .suite = { 4006 .cipher = { 4007 .enc = { 4008 .vecs = aes_xts_enc_tv_template, 4009 .count = AES_XTS_ENC_TEST_VECTORS 4010 }, 4011 .dec = { 4012 .vecs = aes_xts_dec_tv_template, 4013 .count = AES_XTS_DEC_TEST_VECTORS 4014 } 4015 } 4016 } 4017 }, { 4018 .alg = "xts(camellia)", 4019 .test = alg_test_skcipher, 4020 .suite = { 4021 .cipher = { 4022 .enc = { 4023 .vecs = camellia_xts_enc_tv_template, 4024 .count = CAMELLIA_XTS_ENC_TEST_VECTORS 4025 }, 4026 .dec = { 4027 .vecs = camellia_xts_dec_tv_template, 4028 .count = CAMELLIA_XTS_DEC_TEST_VECTORS 4029 } 4030 } 4031 } 4032 }, { 4033 .alg = "xts(cast6)", 4034 .test = alg_test_skcipher, 4035 .suite = { 4036 .cipher = { 4037 .enc = { 4038 .vecs = cast6_xts_enc_tv_template, 4039 .count = CAST6_XTS_ENC_TEST_VECTORS 4040 }, 4041 .dec = { 4042 .vecs = cast6_xts_dec_tv_template, 4043 .count = CAST6_XTS_DEC_TEST_VECTORS 4044 } 4045 } 4046 } 4047 }, { 4048 .alg = "xts(serpent)", 4049 .test = alg_test_skcipher, 4050 .suite = { 4051 .cipher = { 4052 .enc = { 4053 .vecs = serpent_xts_enc_tv_template, 4054 .count = SERPENT_XTS_ENC_TEST_VECTORS 4055 }, 4056 .dec = { 4057 .vecs = serpent_xts_dec_tv_template, 4058 .count = SERPENT_XTS_DEC_TEST_VECTORS 4059 } 4060 } 4061 } 4062 }, { 4063 .alg = "xts(twofish)", 4064 .test = alg_test_skcipher, 4065 .suite = { 4066 .cipher = { 4067 .enc = { 4068 .vecs = tf_xts_enc_tv_template, 4069 .count = TF_XTS_ENC_TEST_VECTORS 4070 }, 4071 .dec = { 4072 .vecs = tf_xts_dec_tv_template, 4073 .count = TF_XTS_DEC_TEST_VECTORS 4074 } 4075 } 4076 } 4077 } 4078 }; 4079 4080 static bool alg_test_descs_checked; 4081 4082 static void alg_test_descs_check_order(void) 4083 { 4084 int i; 4085 4086 /* only check once */ 4087 if (alg_test_descs_checked) 4088 return; 4089 4090 alg_test_descs_checked = true; 4091 4092 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) { 4093 int diff = strcmp(alg_test_descs[i - 1].alg, 4094 alg_test_descs[i].alg); 4095 4096 if (WARN_ON(diff > 0)) { 4097 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n", 4098 alg_test_descs[i - 1].alg, 4099 alg_test_descs[i].alg); 4100 } 4101 4102 if (WARN_ON(diff == 0)) { 4103 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n", 4104 alg_test_descs[i].alg); 4105 } 4106 } 4107 } 4108 4109 static int alg_find_test(const char *alg) 4110 { 4111 int start = 0; 4112 int end = ARRAY_SIZE(alg_test_descs); 4113 4114 while (start < end) { 4115 int i = (start + end) / 2; 4116 int diff = strcmp(alg_test_descs[i].alg, alg); 4117 4118 if (diff > 0) { 4119 end = i; 4120 continue; 4121 } 4122 4123 if (diff < 0) { 4124 start = i + 1; 4125 continue; 4126 } 4127 4128 return i; 4129 } 4130 4131 return -1; 4132 } 4133 4134 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 4135 { 4136 int i; 4137 int j; 4138 int rc; 4139 4140 if (!fips_enabled && notests) { 4141 printk_once(KERN_INFO "alg: self-tests disabled\n"); 4142 return 0; 4143 } 4144 4145 alg_test_descs_check_order(); 4146 4147 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { 4148 char nalg[CRYPTO_MAX_ALG_NAME]; 4149 4150 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >= 4151 sizeof(nalg)) 4152 return -ENAMETOOLONG; 4153 4154 i = alg_find_test(nalg); 4155 if (i < 0) 4156 goto notest; 4157 4158 if (fips_enabled && !alg_test_descs[i].fips_allowed) 4159 goto non_fips_alg; 4160 4161 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask); 4162 goto test_done; 4163 } 4164 4165 i = alg_find_test(alg); 4166 j = alg_find_test(driver); 4167 if (i < 0 && j < 0) 4168 goto notest; 4169 4170 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) || 4171 (j >= 0 && !alg_test_descs[j].fips_allowed))) 4172 goto non_fips_alg; 4173 4174 rc = 0; 4175 if (i >= 0) 4176 rc |= alg_test_descs[i].test(alg_test_descs + i, driver, 4177 type, mask); 4178 if (j >= 0 && j != i) 4179 rc |= alg_test_descs[j].test(alg_test_descs + j, driver, 4180 type, mask); 4181 4182 test_done: 4183 if (fips_enabled && rc) 4184 panic("%s: %s alg self test failed in fips mode!\n", driver, alg); 4185 4186 if (fips_enabled && !rc) 4187 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg); 4188 4189 return rc; 4190 4191 notest: 4192 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver); 4193 return 0; 4194 non_fips_alg: 4195 return -EINVAL; 4196 } 4197 4198 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */ 4199 4200 EXPORT_SYMBOL_GPL(alg_test); 4201