1 /* 2 * Algorithm testing framework and tests. 3 * 4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> 6 * Copyright (c) 2007 Nokia Siemens Networks 7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> 8 * 9 * Updated RFC4106 AES-GCM testing. 10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com) 11 * Adrian Hoban <adrian.hoban@intel.com> 12 * Gabriele Paoloni <gabriele.paoloni@intel.com> 13 * Tadeusz Struk (tadeusz.struk@intel.com) 14 * Copyright (c) 2010, Intel Corporation. 15 * 16 * This program is free software; you can redistribute it and/or modify it 17 * under the terms of the GNU General Public License as published by the Free 18 * Software Foundation; either version 2 of the License, or (at your option) 19 * any later version. 20 * 21 */ 22 23 #include <crypto/aead.h> 24 #include <crypto/hash.h> 25 #include <crypto/skcipher.h> 26 #include <linux/err.h> 27 #include <linux/fips.h> 28 #include <linux/module.h> 29 #include <linux/scatterlist.h> 30 #include <linux/slab.h> 31 #include <linux/string.h> 32 #include <crypto/rng.h> 33 #include <crypto/drbg.h> 34 #include <crypto/akcipher.h> 35 #include <crypto/kpp.h> 36 #include <crypto/acompress.h> 37 38 #include "internal.h" 39 40 static bool notests; 41 module_param(notests, bool, 0644); 42 MODULE_PARM_DESC(notests, "disable crypto self-tests"); 43 44 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS 45 46 /* a perfect nop */ 47 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 48 { 49 return 0; 50 } 51 52 #else 53 54 #include "testmgr.h" 55 56 /* 57 * Need slab memory for testing (size in number of pages). 58 */ 59 #define XBUFSIZE 8 60 61 /* 62 * Indexes into the xbuf to simulate cross-page access. 63 */ 64 #define IDX1 32 65 #define IDX2 32400 66 #define IDX3 1511 67 #define IDX4 8193 68 #define IDX5 22222 69 #define IDX6 17101 70 #define IDX7 27333 71 #define IDX8 3000 72 73 /* 74 * Used by test_cipher() 75 */ 76 #define ENCRYPT 1 77 #define DECRYPT 0 78 79 struct tcrypt_result { 80 struct completion completion; 81 int err; 82 }; 83 84 struct aead_test_suite { 85 struct { 86 struct aead_testvec *vecs; 87 unsigned int count; 88 } enc, dec; 89 }; 90 91 struct cipher_test_suite { 92 struct { 93 struct cipher_testvec *vecs; 94 unsigned int count; 95 } enc, dec; 96 }; 97 98 struct comp_test_suite { 99 struct { 100 struct comp_testvec *vecs; 101 unsigned int count; 102 } comp, decomp; 103 }; 104 105 struct hash_test_suite { 106 struct hash_testvec *vecs; 107 unsigned int count; 108 }; 109 110 struct cprng_test_suite { 111 struct cprng_testvec *vecs; 112 unsigned int count; 113 }; 114 115 struct drbg_test_suite { 116 struct drbg_testvec *vecs; 117 unsigned int count; 118 }; 119 120 struct akcipher_test_suite { 121 struct akcipher_testvec *vecs; 122 unsigned int count; 123 }; 124 125 struct kpp_test_suite { 126 struct kpp_testvec *vecs; 127 unsigned int count; 128 }; 129 130 struct alg_test_desc { 131 const char *alg; 132 int (*test)(const struct alg_test_desc *desc, const char *driver, 133 u32 type, u32 mask); 134 int fips_allowed; /* set if alg is allowed in fips mode */ 135 136 union { 137 struct aead_test_suite aead; 138 struct cipher_test_suite cipher; 139 struct comp_test_suite comp; 140 struct hash_test_suite hash; 141 struct cprng_test_suite cprng; 142 struct drbg_test_suite drbg; 143 struct akcipher_test_suite akcipher; 144 struct kpp_test_suite kpp; 145 } suite; 146 }; 147 148 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 }; 149 150 static void hexdump(unsigned char *buf, unsigned int len) 151 { 152 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET, 153 16, 1, 154 buf, len, false); 155 } 156 157 static void tcrypt_complete(struct crypto_async_request *req, int err) 158 { 159 struct tcrypt_result *res = req->data; 160 161 if (err == -EINPROGRESS) 162 return; 163 164 res->err = err; 165 complete(&res->completion); 166 } 167 168 static int testmgr_alloc_buf(char *buf[XBUFSIZE]) 169 { 170 int i; 171 172 for (i = 0; i < XBUFSIZE; i++) { 173 buf[i] = (void *)__get_free_page(GFP_KERNEL); 174 if (!buf[i]) 175 goto err_free_buf; 176 } 177 178 return 0; 179 180 err_free_buf: 181 while (i-- > 0) 182 free_page((unsigned long)buf[i]); 183 184 return -ENOMEM; 185 } 186 187 static void testmgr_free_buf(char *buf[XBUFSIZE]) 188 { 189 int i; 190 191 for (i = 0; i < XBUFSIZE; i++) 192 free_page((unsigned long)buf[i]); 193 } 194 195 static int wait_async_op(struct tcrypt_result *tr, int ret) 196 { 197 if (ret == -EINPROGRESS || ret == -EBUSY) { 198 wait_for_completion(&tr->completion); 199 reinit_completion(&tr->completion); 200 ret = tr->err; 201 } 202 return ret; 203 } 204 205 static int ahash_partial_update(struct ahash_request **preq, 206 struct crypto_ahash *tfm, struct hash_testvec *template, 207 void *hash_buff, int k, int temp, struct scatterlist *sg, 208 const char *algo, char *result, struct tcrypt_result *tresult) 209 { 210 char *state; 211 struct ahash_request *req; 212 int statesize, ret = -EINVAL; 213 const char guard[] = { 0x00, 0xba, 0xad, 0x00 }; 214 215 req = *preq; 216 statesize = crypto_ahash_statesize( 217 crypto_ahash_reqtfm(req)); 218 state = kmalloc(statesize + sizeof(guard), GFP_KERNEL); 219 if (!state) { 220 pr_err("alt: hash: Failed to alloc state for %s\n", algo); 221 goto out_nostate; 222 } 223 memcpy(state + statesize, guard, sizeof(guard)); 224 ret = crypto_ahash_export(req, state); 225 WARN_ON(memcmp(state + statesize, guard, sizeof(guard))); 226 if (ret) { 227 pr_err("alt: hash: Failed to export() for %s\n", algo); 228 goto out; 229 } 230 ahash_request_free(req); 231 req = ahash_request_alloc(tfm, GFP_KERNEL); 232 if (!req) { 233 pr_err("alg: hash: Failed to alloc request for %s\n", algo); 234 goto out_noreq; 235 } 236 ahash_request_set_callback(req, 237 CRYPTO_TFM_REQ_MAY_BACKLOG, 238 tcrypt_complete, tresult); 239 240 memcpy(hash_buff, template->plaintext + temp, 241 template->tap[k]); 242 sg_init_one(&sg[0], hash_buff, template->tap[k]); 243 ahash_request_set_crypt(req, sg, result, template->tap[k]); 244 ret = crypto_ahash_import(req, state); 245 if (ret) { 246 pr_err("alg: hash: Failed to import() for %s\n", algo); 247 goto out; 248 } 249 ret = wait_async_op(tresult, crypto_ahash_update(req)); 250 if (ret) 251 goto out; 252 *preq = req; 253 ret = 0; 254 goto out_noreq; 255 out: 256 ahash_request_free(req); 257 out_noreq: 258 kfree(state); 259 out_nostate: 260 return ret; 261 } 262 263 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 264 unsigned int tcount, bool use_digest, 265 const int align_offset) 266 { 267 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 268 unsigned int i, j, k, temp; 269 struct scatterlist sg[8]; 270 char *result; 271 char *key; 272 struct ahash_request *req; 273 struct tcrypt_result tresult; 274 void *hash_buff; 275 char *xbuf[XBUFSIZE]; 276 int ret = -ENOMEM; 277 278 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL); 279 if (!result) 280 return ret; 281 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 282 if (!key) 283 goto out_nobuf; 284 if (testmgr_alloc_buf(xbuf)) 285 goto out_nobuf; 286 287 init_completion(&tresult.completion); 288 289 req = ahash_request_alloc(tfm, GFP_KERNEL); 290 if (!req) { 291 printk(KERN_ERR "alg: hash: Failed to allocate request for " 292 "%s\n", algo); 293 goto out_noreq; 294 } 295 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 296 tcrypt_complete, &tresult); 297 298 j = 0; 299 for (i = 0; i < tcount; i++) { 300 if (template[i].np) 301 continue; 302 303 ret = -EINVAL; 304 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) 305 goto out; 306 307 j++; 308 memset(result, 0, MAX_DIGEST_SIZE); 309 310 hash_buff = xbuf[0]; 311 hash_buff += align_offset; 312 313 memcpy(hash_buff, template[i].plaintext, template[i].psize); 314 sg_init_one(&sg[0], hash_buff, template[i].psize); 315 316 if (template[i].ksize) { 317 crypto_ahash_clear_flags(tfm, ~0); 318 if (template[i].ksize > MAX_KEYLEN) { 319 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 320 j, algo, template[i].ksize, MAX_KEYLEN); 321 ret = -EINVAL; 322 goto out; 323 } 324 memcpy(key, template[i].key, template[i].ksize); 325 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 326 if (ret) { 327 printk(KERN_ERR "alg: hash: setkey failed on " 328 "test %d for %s: ret=%d\n", j, algo, 329 -ret); 330 goto out; 331 } 332 } 333 334 ahash_request_set_crypt(req, sg, result, template[i].psize); 335 if (use_digest) { 336 ret = wait_async_op(&tresult, crypto_ahash_digest(req)); 337 if (ret) { 338 pr_err("alg: hash: digest failed on test %d " 339 "for %s: ret=%d\n", j, algo, -ret); 340 goto out; 341 } 342 } else { 343 ret = wait_async_op(&tresult, crypto_ahash_init(req)); 344 if (ret) { 345 pr_err("alt: hash: init failed on test %d " 346 "for %s: ret=%d\n", j, algo, -ret); 347 goto out; 348 } 349 ret = wait_async_op(&tresult, crypto_ahash_update(req)); 350 if (ret) { 351 pr_err("alt: hash: update failed on test %d " 352 "for %s: ret=%d\n", j, algo, -ret); 353 goto out; 354 } 355 ret = wait_async_op(&tresult, crypto_ahash_final(req)); 356 if (ret) { 357 pr_err("alt: hash: final failed on test %d " 358 "for %s: ret=%d\n", j, algo, -ret); 359 goto out; 360 } 361 } 362 363 if (memcmp(result, template[i].digest, 364 crypto_ahash_digestsize(tfm))) { 365 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 366 j, algo); 367 hexdump(result, crypto_ahash_digestsize(tfm)); 368 ret = -EINVAL; 369 goto out; 370 } 371 } 372 373 j = 0; 374 for (i = 0; i < tcount; i++) { 375 /* alignment tests are only done with continuous buffers */ 376 if (align_offset != 0) 377 break; 378 379 if (!template[i].np) 380 continue; 381 382 j++; 383 memset(result, 0, MAX_DIGEST_SIZE); 384 385 temp = 0; 386 sg_init_table(sg, template[i].np); 387 ret = -EINVAL; 388 for (k = 0; k < template[i].np; k++) { 389 if (WARN_ON(offset_in_page(IDX[k]) + 390 template[i].tap[k] > PAGE_SIZE)) 391 goto out; 392 sg_set_buf(&sg[k], 393 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + 394 offset_in_page(IDX[k]), 395 template[i].plaintext + temp, 396 template[i].tap[k]), 397 template[i].tap[k]); 398 temp += template[i].tap[k]; 399 } 400 401 if (template[i].ksize) { 402 if (template[i].ksize > MAX_KEYLEN) { 403 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 404 j, algo, template[i].ksize, MAX_KEYLEN); 405 ret = -EINVAL; 406 goto out; 407 } 408 crypto_ahash_clear_flags(tfm, ~0); 409 memcpy(key, template[i].key, template[i].ksize); 410 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 411 412 if (ret) { 413 printk(KERN_ERR "alg: hash: setkey " 414 "failed on chunking test %d " 415 "for %s: ret=%d\n", j, algo, -ret); 416 goto out; 417 } 418 } 419 420 ahash_request_set_crypt(req, sg, result, template[i].psize); 421 ret = crypto_ahash_digest(req); 422 switch (ret) { 423 case 0: 424 break; 425 case -EINPROGRESS: 426 case -EBUSY: 427 wait_for_completion(&tresult.completion); 428 reinit_completion(&tresult.completion); 429 ret = tresult.err; 430 if (!ret) 431 break; 432 /* fall through */ 433 default: 434 printk(KERN_ERR "alg: hash: digest failed " 435 "on chunking test %d for %s: " 436 "ret=%d\n", j, algo, -ret); 437 goto out; 438 } 439 440 if (memcmp(result, template[i].digest, 441 crypto_ahash_digestsize(tfm))) { 442 printk(KERN_ERR "alg: hash: Chunking test %d " 443 "failed for %s\n", j, algo); 444 hexdump(result, crypto_ahash_digestsize(tfm)); 445 ret = -EINVAL; 446 goto out; 447 } 448 } 449 450 /* partial update exercise */ 451 j = 0; 452 for (i = 0; i < tcount; i++) { 453 /* alignment tests are only done with continuous buffers */ 454 if (align_offset != 0) 455 break; 456 457 if (template[i].np < 2) 458 continue; 459 460 j++; 461 memset(result, 0, MAX_DIGEST_SIZE); 462 463 ret = -EINVAL; 464 hash_buff = xbuf[0]; 465 memcpy(hash_buff, template[i].plaintext, 466 template[i].tap[0]); 467 sg_init_one(&sg[0], hash_buff, template[i].tap[0]); 468 469 if (template[i].ksize) { 470 crypto_ahash_clear_flags(tfm, ~0); 471 if (template[i].ksize > MAX_KEYLEN) { 472 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 473 j, algo, template[i].ksize, MAX_KEYLEN); 474 ret = -EINVAL; 475 goto out; 476 } 477 memcpy(key, template[i].key, template[i].ksize); 478 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 479 if (ret) { 480 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n", 481 j, algo, -ret); 482 goto out; 483 } 484 } 485 486 ahash_request_set_crypt(req, sg, result, template[i].tap[0]); 487 ret = wait_async_op(&tresult, crypto_ahash_init(req)); 488 if (ret) { 489 pr_err("alt: hash: init failed on test %d for %s: ret=%d\n", 490 j, algo, -ret); 491 goto out; 492 } 493 ret = wait_async_op(&tresult, crypto_ahash_update(req)); 494 if (ret) { 495 pr_err("alt: hash: update failed on test %d for %s: ret=%d\n", 496 j, algo, -ret); 497 goto out; 498 } 499 500 temp = template[i].tap[0]; 501 for (k = 1; k < template[i].np; k++) { 502 ret = ahash_partial_update(&req, tfm, &template[i], 503 hash_buff, k, temp, &sg[0], algo, result, 504 &tresult); 505 if (ret) { 506 pr_err("hash: partial update failed on test %d for %s: ret=%d\n", 507 j, algo, -ret); 508 goto out_noreq; 509 } 510 temp += template[i].tap[k]; 511 } 512 ret = wait_async_op(&tresult, crypto_ahash_final(req)); 513 if (ret) { 514 pr_err("alt: hash: final failed on test %d for %s: ret=%d\n", 515 j, algo, -ret); 516 goto out; 517 } 518 if (memcmp(result, template[i].digest, 519 crypto_ahash_digestsize(tfm))) { 520 pr_err("alg: hash: Partial Test %d failed for %s\n", 521 j, algo); 522 hexdump(result, crypto_ahash_digestsize(tfm)); 523 ret = -EINVAL; 524 goto out; 525 } 526 } 527 528 ret = 0; 529 530 out: 531 ahash_request_free(req); 532 out_noreq: 533 testmgr_free_buf(xbuf); 534 out_nobuf: 535 kfree(key); 536 kfree(result); 537 return ret; 538 } 539 540 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 541 unsigned int tcount, bool use_digest) 542 { 543 unsigned int alignmask; 544 int ret; 545 546 ret = __test_hash(tfm, template, tcount, use_digest, 0); 547 if (ret) 548 return ret; 549 550 /* test unaligned buffers, check with one byte offset */ 551 ret = __test_hash(tfm, template, tcount, use_digest, 1); 552 if (ret) 553 return ret; 554 555 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 556 if (alignmask) { 557 /* Check if alignment mask for tfm is correctly set. */ 558 ret = __test_hash(tfm, template, tcount, use_digest, 559 alignmask + 1); 560 if (ret) 561 return ret; 562 } 563 564 return 0; 565 } 566 567 static int __test_aead(struct crypto_aead *tfm, int enc, 568 struct aead_testvec *template, unsigned int tcount, 569 const bool diff_dst, const int align_offset) 570 { 571 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 572 unsigned int i, j, k, n, temp; 573 int ret = -ENOMEM; 574 char *q; 575 char *key; 576 struct aead_request *req; 577 struct scatterlist *sg; 578 struct scatterlist *sgout; 579 const char *e, *d; 580 struct tcrypt_result result; 581 unsigned int authsize, iv_len; 582 void *input; 583 void *output; 584 void *assoc; 585 char *iv; 586 char *xbuf[XBUFSIZE]; 587 char *xoutbuf[XBUFSIZE]; 588 char *axbuf[XBUFSIZE]; 589 590 iv = kzalloc(MAX_IVLEN, GFP_KERNEL); 591 if (!iv) 592 return ret; 593 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 594 if (!key) 595 goto out_noxbuf; 596 if (testmgr_alloc_buf(xbuf)) 597 goto out_noxbuf; 598 if (testmgr_alloc_buf(axbuf)) 599 goto out_noaxbuf; 600 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 601 goto out_nooutbuf; 602 603 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ 604 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL); 605 if (!sg) 606 goto out_nosg; 607 sgout = &sg[16]; 608 609 if (diff_dst) 610 d = "-ddst"; 611 else 612 d = ""; 613 614 if (enc == ENCRYPT) 615 e = "encryption"; 616 else 617 e = "decryption"; 618 619 init_completion(&result.completion); 620 621 req = aead_request_alloc(tfm, GFP_KERNEL); 622 if (!req) { 623 pr_err("alg: aead%s: Failed to allocate request for %s\n", 624 d, algo); 625 goto out; 626 } 627 628 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 629 tcrypt_complete, &result); 630 631 iv_len = crypto_aead_ivsize(tfm); 632 633 for (i = 0, j = 0; i < tcount; i++) { 634 if (template[i].np) 635 continue; 636 637 j++; 638 639 /* some templates have no input data but they will 640 * touch input 641 */ 642 input = xbuf[0]; 643 input += align_offset; 644 assoc = axbuf[0]; 645 646 ret = -EINVAL; 647 if (WARN_ON(align_offset + template[i].ilen > 648 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 649 goto out; 650 651 memcpy(input, template[i].input, template[i].ilen); 652 memcpy(assoc, template[i].assoc, template[i].alen); 653 if (template[i].iv) 654 memcpy(iv, template[i].iv, iv_len); 655 else 656 memset(iv, 0, iv_len); 657 658 crypto_aead_clear_flags(tfm, ~0); 659 if (template[i].wk) 660 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 661 662 if (template[i].klen > MAX_KEYLEN) { 663 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 664 d, j, algo, template[i].klen, 665 MAX_KEYLEN); 666 ret = -EINVAL; 667 goto out; 668 } 669 memcpy(key, template[i].key, template[i].klen); 670 671 ret = crypto_aead_setkey(tfm, key, template[i].klen); 672 if (template[i].fail == !ret) { 673 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", 674 d, j, algo, crypto_aead_get_flags(tfm)); 675 goto out; 676 } else if (ret) 677 continue; 678 679 authsize = abs(template[i].rlen - template[i].ilen); 680 ret = crypto_aead_setauthsize(tfm, authsize); 681 if (ret) { 682 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", 683 d, authsize, j, algo); 684 goto out; 685 } 686 687 k = !!template[i].alen; 688 sg_init_table(sg, k + 1); 689 sg_set_buf(&sg[0], assoc, template[i].alen); 690 sg_set_buf(&sg[k], input, 691 template[i].ilen + (enc ? authsize : 0)); 692 output = input; 693 694 if (diff_dst) { 695 sg_init_table(sgout, k + 1); 696 sg_set_buf(&sgout[0], assoc, template[i].alen); 697 698 output = xoutbuf[0]; 699 output += align_offset; 700 sg_set_buf(&sgout[k], output, 701 template[i].rlen + (enc ? 0 : authsize)); 702 } 703 704 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 705 template[i].ilen, iv); 706 707 aead_request_set_ad(req, template[i].alen); 708 709 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 710 711 switch (ret) { 712 case 0: 713 if (template[i].novrfy) { 714 /* verification was supposed to fail */ 715 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 716 d, e, j, algo); 717 /* so really, we got a bad message */ 718 ret = -EBADMSG; 719 goto out; 720 } 721 break; 722 case -EINPROGRESS: 723 case -EBUSY: 724 wait_for_completion(&result.completion); 725 reinit_completion(&result.completion); 726 ret = result.err; 727 if (!ret) 728 break; 729 case -EBADMSG: 730 if (template[i].novrfy) 731 /* verification failure was expected */ 732 continue; 733 /* fall through */ 734 default: 735 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", 736 d, e, j, algo, -ret); 737 goto out; 738 } 739 740 q = output; 741 if (memcmp(q, template[i].result, template[i].rlen)) { 742 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 743 d, j, e, algo); 744 hexdump(q, template[i].rlen); 745 ret = -EINVAL; 746 goto out; 747 } 748 } 749 750 for (i = 0, j = 0; i < tcount; i++) { 751 /* alignment tests are only done with continuous buffers */ 752 if (align_offset != 0) 753 break; 754 755 if (!template[i].np) 756 continue; 757 758 j++; 759 760 if (template[i].iv) 761 memcpy(iv, template[i].iv, iv_len); 762 else 763 memset(iv, 0, MAX_IVLEN); 764 765 crypto_aead_clear_flags(tfm, ~0); 766 if (template[i].wk) 767 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 768 if (template[i].klen > MAX_KEYLEN) { 769 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 770 d, j, algo, template[i].klen, MAX_KEYLEN); 771 ret = -EINVAL; 772 goto out; 773 } 774 memcpy(key, template[i].key, template[i].klen); 775 776 ret = crypto_aead_setkey(tfm, key, template[i].klen); 777 if (template[i].fail == !ret) { 778 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 779 d, j, algo, crypto_aead_get_flags(tfm)); 780 goto out; 781 } else if (ret) 782 continue; 783 784 authsize = abs(template[i].rlen - template[i].ilen); 785 786 ret = -EINVAL; 787 sg_init_table(sg, template[i].anp + template[i].np); 788 if (diff_dst) 789 sg_init_table(sgout, template[i].anp + template[i].np); 790 791 ret = -EINVAL; 792 for (k = 0, temp = 0; k < template[i].anp; k++) { 793 if (WARN_ON(offset_in_page(IDX[k]) + 794 template[i].atap[k] > PAGE_SIZE)) 795 goto out; 796 sg_set_buf(&sg[k], 797 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 798 offset_in_page(IDX[k]), 799 template[i].assoc + temp, 800 template[i].atap[k]), 801 template[i].atap[k]); 802 if (diff_dst) 803 sg_set_buf(&sgout[k], 804 axbuf[IDX[k] >> PAGE_SHIFT] + 805 offset_in_page(IDX[k]), 806 template[i].atap[k]); 807 temp += template[i].atap[k]; 808 } 809 810 for (k = 0, temp = 0; k < template[i].np; k++) { 811 if (WARN_ON(offset_in_page(IDX[k]) + 812 template[i].tap[k] > PAGE_SIZE)) 813 goto out; 814 815 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 816 memcpy(q, template[i].input + temp, template[i].tap[k]); 817 sg_set_buf(&sg[template[i].anp + k], 818 q, template[i].tap[k]); 819 820 if (diff_dst) { 821 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 822 offset_in_page(IDX[k]); 823 824 memset(q, 0, template[i].tap[k]); 825 826 sg_set_buf(&sgout[template[i].anp + k], 827 q, template[i].tap[k]); 828 } 829 830 n = template[i].tap[k]; 831 if (k == template[i].np - 1 && enc) 832 n += authsize; 833 if (offset_in_page(q) + n < PAGE_SIZE) 834 q[n] = 0; 835 836 temp += template[i].tap[k]; 837 } 838 839 ret = crypto_aead_setauthsize(tfm, authsize); 840 if (ret) { 841 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 842 d, authsize, j, algo); 843 goto out; 844 } 845 846 if (enc) { 847 if (WARN_ON(sg[template[i].anp + k - 1].offset + 848 sg[template[i].anp + k - 1].length + 849 authsize > PAGE_SIZE)) { 850 ret = -EINVAL; 851 goto out; 852 } 853 854 if (diff_dst) 855 sgout[template[i].anp + k - 1].length += 856 authsize; 857 sg[template[i].anp + k - 1].length += authsize; 858 } 859 860 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 861 template[i].ilen, 862 iv); 863 864 aead_request_set_ad(req, template[i].alen); 865 866 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req); 867 868 switch (ret) { 869 case 0: 870 if (template[i].novrfy) { 871 /* verification was supposed to fail */ 872 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 873 d, e, j, algo); 874 /* so really, we got a bad message */ 875 ret = -EBADMSG; 876 goto out; 877 } 878 break; 879 case -EINPROGRESS: 880 case -EBUSY: 881 wait_for_completion(&result.completion); 882 reinit_completion(&result.completion); 883 ret = result.err; 884 if (!ret) 885 break; 886 case -EBADMSG: 887 if (template[i].novrfy) 888 /* verification failure was expected */ 889 continue; 890 /* fall through */ 891 default: 892 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", 893 d, e, j, algo, -ret); 894 goto out; 895 } 896 897 ret = -EINVAL; 898 for (k = 0, temp = 0; k < template[i].np; k++) { 899 if (diff_dst) 900 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 901 offset_in_page(IDX[k]); 902 else 903 q = xbuf[IDX[k] >> PAGE_SHIFT] + 904 offset_in_page(IDX[k]); 905 906 n = template[i].tap[k]; 907 if (k == template[i].np - 1) 908 n += enc ? authsize : -authsize; 909 910 if (memcmp(q, template[i].result + temp, n)) { 911 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 912 d, j, e, k, algo); 913 hexdump(q, n); 914 goto out; 915 } 916 917 q += n; 918 if (k == template[i].np - 1 && !enc) { 919 if (!diff_dst && 920 memcmp(q, template[i].input + 921 temp + n, authsize)) 922 n = authsize; 923 else 924 n = 0; 925 } else { 926 for (n = 0; offset_in_page(q + n) && q[n]; n++) 927 ; 928 } 929 if (n) { 930 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 931 d, j, e, k, algo, n); 932 hexdump(q, n); 933 goto out; 934 } 935 936 temp += template[i].tap[k]; 937 } 938 } 939 940 ret = 0; 941 942 out: 943 aead_request_free(req); 944 kfree(sg); 945 out_nosg: 946 if (diff_dst) 947 testmgr_free_buf(xoutbuf); 948 out_nooutbuf: 949 testmgr_free_buf(axbuf); 950 out_noaxbuf: 951 testmgr_free_buf(xbuf); 952 out_noxbuf: 953 kfree(key); 954 kfree(iv); 955 return ret; 956 } 957 958 static int test_aead(struct crypto_aead *tfm, int enc, 959 struct aead_testvec *template, unsigned int tcount) 960 { 961 unsigned int alignmask; 962 int ret; 963 964 /* test 'dst == src' case */ 965 ret = __test_aead(tfm, enc, template, tcount, false, 0); 966 if (ret) 967 return ret; 968 969 /* test 'dst != src' case */ 970 ret = __test_aead(tfm, enc, template, tcount, true, 0); 971 if (ret) 972 return ret; 973 974 /* test unaligned buffers, check with one byte offset */ 975 ret = __test_aead(tfm, enc, template, tcount, true, 1); 976 if (ret) 977 return ret; 978 979 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 980 if (alignmask) { 981 /* Check if alignment mask for tfm is correctly set. */ 982 ret = __test_aead(tfm, enc, template, tcount, true, 983 alignmask + 1); 984 if (ret) 985 return ret; 986 } 987 988 return 0; 989 } 990 991 static int test_cipher(struct crypto_cipher *tfm, int enc, 992 struct cipher_testvec *template, unsigned int tcount) 993 { 994 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 995 unsigned int i, j, k; 996 char *q; 997 const char *e; 998 void *data; 999 char *xbuf[XBUFSIZE]; 1000 int ret = -ENOMEM; 1001 1002 if (testmgr_alloc_buf(xbuf)) 1003 goto out_nobuf; 1004 1005 if (enc == ENCRYPT) 1006 e = "encryption"; 1007 else 1008 e = "decryption"; 1009 1010 j = 0; 1011 for (i = 0; i < tcount; i++) { 1012 if (template[i].np) 1013 continue; 1014 1015 if (fips_enabled && template[i].fips_skip) 1016 continue; 1017 1018 j++; 1019 1020 ret = -EINVAL; 1021 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 1022 goto out; 1023 1024 data = xbuf[0]; 1025 memcpy(data, template[i].input, template[i].ilen); 1026 1027 crypto_cipher_clear_flags(tfm, ~0); 1028 if (template[i].wk) 1029 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 1030 1031 ret = crypto_cipher_setkey(tfm, template[i].key, 1032 template[i].klen); 1033 if (template[i].fail == !ret) { 1034 printk(KERN_ERR "alg: cipher: setkey failed " 1035 "on test %d for %s: flags=%x\n", j, 1036 algo, crypto_cipher_get_flags(tfm)); 1037 goto out; 1038 } else if (ret) 1039 continue; 1040 1041 for (k = 0; k < template[i].ilen; 1042 k += crypto_cipher_blocksize(tfm)) { 1043 if (enc) 1044 crypto_cipher_encrypt_one(tfm, data + k, 1045 data + k); 1046 else 1047 crypto_cipher_decrypt_one(tfm, data + k, 1048 data + k); 1049 } 1050 1051 q = data; 1052 if (memcmp(q, template[i].result, template[i].rlen)) { 1053 printk(KERN_ERR "alg: cipher: Test %d failed " 1054 "on %s for %s\n", j, e, algo); 1055 hexdump(q, template[i].rlen); 1056 ret = -EINVAL; 1057 goto out; 1058 } 1059 } 1060 1061 ret = 0; 1062 1063 out: 1064 testmgr_free_buf(xbuf); 1065 out_nobuf: 1066 return ret; 1067 } 1068 1069 static int __test_skcipher(struct crypto_skcipher *tfm, int enc, 1070 struct cipher_testvec *template, unsigned int tcount, 1071 const bool diff_dst, const int align_offset) 1072 { 1073 const char *algo = 1074 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)); 1075 unsigned int i, j, k, n, temp; 1076 char *q; 1077 struct skcipher_request *req; 1078 struct scatterlist sg[8]; 1079 struct scatterlist sgout[8]; 1080 const char *e, *d; 1081 struct tcrypt_result result; 1082 void *data; 1083 char iv[MAX_IVLEN]; 1084 char *xbuf[XBUFSIZE]; 1085 char *xoutbuf[XBUFSIZE]; 1086 int ret = -ENOMEM; 1087 unsigned int ivsize = crypto_skcipher_ivsize(tfm); 1088 1089 if (testmgr_alloc_buf(xbuf)) 1090 goto out_nobuf; 1091 1092 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 1093 goto out_nooutbuf; 1094 1095 if (diff_dst) 1096 d = "-ddst"; 1097 else 1098 d = ""; 1099 1100 if (enc == ENCRYPT) 1101 e = "encryption"; 1102 else 1103 e = "decryption"; 1104 1105 init_completion(&result.completion); 1106 1107 req = skcipher_request_alloc(tfm, GFP_KERNEL); 1108 if (!req) { 1109 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 1110 d, algo); 1111 goto out; 1112 } 1113 1114 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1115 tcrypt_complete, &result); 1116 1117 j = 0; 1118 for (i = 0; i < tcount; i++) { 1119 if (template[i].np && !template[i].also_non_np) 1120 continue; 1121 1122 if (fips_enabled && template[i].fips_skip) 1123 continue; 1124 1125 if (template[i].iv) 1126 memcpy(iv, template[i].iv, ivsize); 1127 else 1128 memset(iv, 0, MAX_IVLEN); 1129 1130 j++; 1131 ret = -EINVAL; 1132 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE)) 1133 goto out; 1134 1135 data = xbuf[0]; 1136 data += align_offset; 1137 memcpy(data, template[i].input, template[i].ilen); 1138 1139 crypto_skcipher_clear_flags(tfm, ~0); 1140 if (template[i].wk) 1141 crypto_skcipher_set_flags(tfm, 1142 CRYPTO_TFM_REQ_WEAK_KEY); 1143 1144 ret = crypto_skcipher_setkey(tfm, template[i].key, 1145 template[i].klen); 1146 if (template[i].fail == !ret) { 1147 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 1148 d, j, algo, crypto_skcipher_get_flags(tfm)); 1149 goto out; 1150 } else if (ret) 1151 continue; 1152 1153 sg_init_one(&sg[0], data, template[i].ilen); 1154 if (diff_dst) { 1155 data = xoutbuf[0]; 1156 data += align_offset; 1157 sg_init_one(&sgout[0], data, template[i].ilen); 1158 } 1159 1160 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1161 template[i].ilen, iv); 1162 ret = enc ? crypto_skcipher_encrypt(req) : 1163 crypto_skcipher_decrypt(req); 1164 1165 switch (ret) { 1166 case 0: 1167 break; 1168 case -EINPROGRESS: 1169 case -EBUSY: 1170 wait_for_completion(&result.completion); 1171 reinit_completion(&result.completion); 1172 ret = result.err; 1173 if (!ret) 1174 break; 1175 /* fall through */ 1176 default: 1177 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 1178 d, e, j, algo, -ret); 1179 goto out; 1180 } 1181 1182 q = data; 1183 if (memcmp(q, template[i].result, template[i].rlen)) { 1184 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n", 1185 d, j, e, algo); 1186 hexdump(q, template[i].rlen); 1187 ret = -EINVAL; 1188 goto out; 1189 } 1190 1191 if (template[i].iv_out && 1192 memcmp(iv, template[i].iv_out, 1193 crypto_skcipher_ivsize(tfm))) { 1194 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n", 1195 d, j, e, algo); 1196 hexdump(iv, crypto_skcipher_ivsize(tfm)); 1197 ret = -EINVAL; 1198 goto out; 1199 } 1200 } 1201 1202 j = 0; 1203 for (i = 0; i < tcount; i++) { 1204 /* alignment tests are only done with continuous buffers */ 1205 if (align_offset != 0) 1206 break; 1207 1208 if (!template[i].np) 1209 continue; 1210 1211 if (fips_enabled && template[i].fips_skip) 1212 continue; 1213 1214 if (template[i].iv) 1215 memcpy(iv, template[i].iv, ivsize); 1216 else 1217 memset(iv, 0, MAX_IVLEN); 1218 1219 j++; 1220 crypto_skcipher_clear_flags(tfm, ~0); 1221 if (template[i].wk) 1222 crypto_skcipher_set_flags(tfm, 1223 CRYPTO_TFM_REQ_WEAK_KEY); 1224 1225 ret = crypto_skcipher_setkey(tfm, template[i].key, 1226 template[i].klen); 1227 if (template[i].fail == !ret) { 1228 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 1229 d, j, algo, crypto_skcipher_get_flags(tfm)); 1230 goto out; 1231 } else if (ret) 1232 continue; 1233 1234 temp = 0; 1235 ret = -EINVAL; 1236 sg_init_table(sg, template[i].np); 1237 if (diff_dst) 1238 sg_init_table(sgout, template[i].np); 1239 for (k = 0; k < template[i].np; k++) { 1240 if (WARN_ON(offset_in_page(IDX[k]) + 1241 template[i].tap[k] > PAGE_SIZE)) 1242 goto out; 1243 1244 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 1245 1246 memcpy(q, template[i].input + temp, template[i].tap[k]); 1247 1248 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE) 1249 q[template[i].tap[k]] = 0; 1250 1251 sg_set_buf(&sg[k], q, template[i].tap[k]); 1252 if (diff_dst) { 1253 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1254 offset_in_page(IDX[k]); 1255 1256 sg_set_buf(&sgout[k], q, template[i].tap[k]); 1257 1258 memset(q, 0, template[i].tap[k]); 1259 if (offset_in_page(q) + 1260 template[i].tap[k] < PAGE_SIZE) 1261 q[template[i].tap[k]] = 0; 1262 } 1263 1264 temp += template[i].tap[k]; 1265 } 1266 1267 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1268 template[i].ilen, iv); 1269 1270 ret = enc ? crypto_skcipher_encrypt(req) : 1271 crypto_skcipher_decrypt(req); 1272 1273 switch (ret) { 1274 case 0: 1275 break; 1276 case -EINPROGRESS: 1277 case -EBUSY: 1278 wait_for_completion(&result.completion); 1279 reinit_completion(&result.completion); 1280 ret = result.err; 1281 if (!ret) 1282 break; 1283 /* fall through */ 1284 default: 1285 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 1286 d, e, j, algo, -ret); 1287 goto out; 1288 } 1289 1290 temp = 0; 1291 ret = -EINVAL; 1292 for (k = 0; k < template[i].np; k++) { 1293 if (diff_dst) 1294 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1295 offset_in_page(IDX[k]); 1296 else 1297 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1298 offset_in_page(IDX[k]); 1299 1300 if (memcmp(q, template[i].result + temp, 1301 template[i].tap[k])) { 1302 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 1303 d, j, e, k, algo); 1304 hexdump(q, template[i].tap[k]); 1305 goto out; 1306 } 1307 1308 q += template[i].tap[k]; 1309 for (n = 0; offset_in_page(q + n) && q[n]; n++) 1310 ; 1311 if (n) { 1312 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 1313 d, j, e, k, algo, n); 1314 hexdump(q, n); 1315 goto out; 1316 } 1317 temp += template[i].tap[k]; 1318 } 1319 } 1320 1321 ret = 0; 1322 1323 out: 1324 skcipher_request_free(req); 1325 if (diff_dst) 1326 testmgr_free_buf(xoutbuf); 1327 out_nooutbuf: 1328 testmgr_free_buf(xbuf); 1329 out_nobuf: 1330 return ret; 1331 } 1332 1333 static int test_skcipher(struct crypto_skcipher *tfm, int enc, 1334 struct cipher_testvec *template, unsigned int tcount) 1335 { 1336 unsigned int alignmask; 1337 int ret; 1338 1339 /* test 'dst == src' case */ 1340 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 1341 if (ret) 1342 return ret; 1343 1344 /* test 'dst != src' case */ 1345 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 1346 if (ret) 1347 return ret; 1348 1349 /* test unaligned buffers, check with one byte offset */ 1350 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 1351 if (ret) 1352 return ret; 1353 1354 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1355 if (alignmask) { 1356 /* Check if alignment mask for tfm is correctly set. */ 1357 ret = __test_skcipher(tfm, enc, template, tcount, true, 1358 alignmask + 1); 1359 if (ret) 1360 return ret; 1361 } 1362 1363 return 0; 1364 } 1365 1366 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, 1367 struct comp_testvec *dtemplate, int ctcount, int dtcount) 1368 { 1369 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 1370 unsigned int i; 1371 char result[COMP_BUF_SIZE]; 1372 int ret; 1373 1374 for (i = 0; i < ctcount; i++) { 1375 int ilen; 1376 unsigned int dlen = COMP_BUF_SIZE; 1377 1378 memset(result, 0, sizeof (result)); 1379 1380 ilen = ctemplate[i].inlen; 1381 ret = crypto_comp_compress(tfm, ctemplate[i].input, 1382 ilen, result, &dlen); 1383 if (ret) { 1384 printk(KERN_ERR "alg: comp: compression failed " 1385 "on test %d for %s: ret=%d\n", i + 1, algo, 1386 -ret); 1387 goto out; 1388 } 1389 1390 if (dlen != ctemplate[i].outlen) { 1391 printk(KERN_ERR "alg: comp: Compression test %d " 1392 "failed for %s: output len = %d\n", i + 1, algo, 1393 dlen); 1394 ret = -EINVAL; 1395 goto out; 1396 } 1397 1398 if (memcmp(result, ctemplate[i].output, dlen)) { 1399 printk(KERN_ERR "alg: comp: Compression test %d " 1400 "failed for %s\n", i + 1, algo); 1401 hexdump(result, dlen); 1402 ret = -EINVAL; 1403 goto out; 1404 } 1405 } 1406 1407 for (i = 0; i < dtcount; i++) { 1408 int ilen; 1409 unsigned int dlen = COMP_BUF_SIZE; 1410 1411 memset(result, 0, sizeof (result)); 1412 1413 ilen = dtemplate[i].inlen; 1414 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 1415 ilen, result, &dlen); 1416 if (ret) { 1417 printk(KERN_ERR "alg: comp: decompression failed " 1418 "on test %d for %s: ret=%d\n", i + 1, algo, 1419 -ret); 1420 goto out; 1421 } 1422 1423 if (dlen != dtemplate[i].outlen) { 1424 printk(KERN_ERR "alg: comp: Decompression test %d " 1425 "failed for %s: output len = %d\n", i + 1, algo, 1426 dlen); 1427 ret = -EINVAL; 1428 goto out; 1429 } 1430 1431 if (memcmp(result, dtemplate[i].output, dlen)) { 1432 printk(KERN_ERR "alg: comp: Decompression test %d " 1433 "failed for %s\n", i + 1, algo); 1434 hexdump(result, dlen); 1435 ret = -EINVAL; 1436 goto out; 1437 } 1438 } 1439 1440 ret = 0; 1441 1442 out: 1443 return ret; 1444 } 1445 1446 static int test_acomp(struct crypto_acomp *tfm, struct comp_testvec *ctemplate, 1447 struct comp_testvec *dtemplate, int ctcount, int dtcount) 1448 { 1449 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm)); 1450 unsigned int i; 1451 char *output; 1452 int ret; 1453 struct scatterlist src, dst; 1454 struct acomp_req *req; 1455 struct tcrypt_result result; 1456 1457 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 1458 if (!output) 1459 return -ENOMEM; 1460 1461 for (i = 0; i < ctcount; i++) { 1462 unsigned int dlen = COMP_BUF_SIZE; 1463 int ilen = ctemplate[i].inlen; 1464 void *input_vec; 1465 1466 input_vec = kmalloc(ilen, GFP_KERNEL); 1467 if (!input_vec) { 1468 ret = -ENOMEM; 1469 goto out; 1470 } 1471 1472 memcpy(input_vec, ctemplate[i].input, ilen); 1473 memset(output, 0, dlen); 1474 init_completion(&result.completion); 1475 sg_init_one(&src, input_vec, ilen); 1476 sg_init_one(&dst, output, dlen); 1477 1478 req = acomp_request_alloc(tfm); 1479 if (!req) { 1480 pr_err("alg: acomp: request alloc failed for %s\n", 1481 algo); 1482 kfree(input_vec); 1483 ret = -ENOMEM; 1484 goto out; 1485 } 1486 1487 acomp_request_set_params(req, &src, &dst, ilen, dlen); 1488 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1489 tcrypt_complete, &result); 1490 1491 ret = wait_async_op(&result, crypto_acomp_compress(req)); 1492 if (ret) { 1493 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n", 1494 i + 1, algo, -ret); 1495 kfree(input_vec); 1496 acomp_request_free(req); 1497 goto out; 1498 } 1499 1500 if (req->dlen != ctemplate[i].outlen) { 1501 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n", 1502 i + 1, algo, req->dlen); 1503 ret = -EINVAL; 1504 kfree(input_vec); 1505 acomp_request_free(req); 1506 goto out; 1507 } 1508 1509 if (memcmp(output, ctemplate[i].output, req->dlen)) { 1510 pr_err("alg: acomp: Compression test %d failed for %s\n", 1511 i + 1, algo); 1512 hexdump(output, req->dlen); 1513 ret = -EINVAL; 1514 kfree(input_vec); 1515 acomp_request_free(req); 1516 goto out; 1517 } 1518 1519 kfree(input_vec); 1520 acomp_request_free(req); 1521 } 1522 1523 for (i = 0; i < dtcount; i++) { 1524 unsigned int dlen = COMP_BUF_SIZE; 1525 int ilen = dtemplate[i].inlen; 1526 void *input_vec; 1527 1528 input_vec = kmalloc(ilen, GFP_KERNEL); 1529 if (!input_vec) { 1530 ret = -ENOMEM; 1531 goto out; 1532 } 1533 1534 memcpy(input_vec, dtemplate[i].input, ilen); 1535 memset(output, 0, dlen); 1536 init_completion(&result.completion); 1537 sg_init_one(&src, input_vec, ilen); 1538 sg_init_one(&dst, output, dlen); 1539 1540 req = acomp_request_alloc(tfm); 1541 if (!req) { 1542 pr_err("alg: acomp: request alloc failed for %s\n", 1543 algo); 1544 kfree(input_vec); 1545 ret = -ENOMEM; 1546 goto out; 1547 } 1548 1549 acomp_request_set_params(req, &src, &dst, ilen, dlen); 1550 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1551 tcrypt_complete, &result); 1552 1553 ret = wait_async_op(&result, crypto_acomp_decompress(req)); 1554 if (ret) { 1555 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n", 1556 i + 1, algo, -ret); 1557 kfree(input_vec); 1558 acomp_request_free(req); 1559 goto out; 1560 } 1561 1562 if (req->dlen != dtemplate[i].outlen) { 1563 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n", 1564 i + 1, algo, req->dlen); 1565 ret = -EINVAL; 1566 kfree(input_vec); 1567 acomp_request_free(req); 1568 goto out; 1569 } 1570 1571 if (memcmp(output, dtemplate[i].output, req->dlen)) { 1572 pr_err("alg: acomp: Decompression test %d failed for %s\n", 1573 i + 1, algo); 1574 hexdump(output, req->dlen); 1575 ret = -EINVAL; 1576 kfree(input_vec); 1577 acomp_request_free(req); 1578 goto out; 1579 } 1580 1581 kfree(input_vec); 1582 acomp_request_free(req); 1583 } 1584 1585 ret = 0; 1586 1587 out: 1588 kfree(output); 1589 return ret; 1590 } 1591 1592 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template, 1593 unsigned int tcount) 1594 { 1595 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1596 int err = 0, i, j, seedsize; 1597 u8 *seed; 1598 char result[32]; 1599 1600 seedsize = crypto_rng_seedsize(tfm); 1601 1602 seed = kmalloc(seedsize, GFP_KERNEL); 1603 if (!seed) { 1604 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1605 "for %s\n", algo); 1606 return -ENOMEM; 1607 } 1608 1609 for (i = 0; i < tcount; i++) { 1610 memset(result, 0, 32); 1611 1612 memcpy(seed, template[i].v, template[i].vlen); 1613 memcpy(seed + template[i].vlen, template[i].key, 1614 template[i].klen); 1615 memcpy(seed + template[i].vlen + template[i].klen, 1616 template[i].dt, template[i].dtlen); 1617 1618 err = crypto_rng_reset(tfm, seed, seedsize); 1619 if (err) { 1620 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1621 "for %s\n", algo); 1622 goto out; 1623 } 1624 1625 for (j = 0; j < template[i].loops; j++) { 1626 err = crypto_rng_get_bytes(tfm, result, 1627 template[i].rlen); 1628 if (err < 0) { 1629 printk(KERN_ERR "alg: cprng: Failed to obtain " 1630 "the correct amount of random data for " 1631 "%s (requested %d)\n", algo, 1632 template[i].rlen); 1633 goto out; 1634 } 1635 } 1636 1637 err = memcmp(result, template[i].result, 1638 template[i].rlen); 1639 if (err) { 1640 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1641 i, algo); 1642 hexdump(result, template[i].rlen); 1643 err = -EINVAL; 1644 goto out; 1645 } 1646 } 1647 1648 out: 1649 kfree(seed); 1650 return err; 1651 } 1652 1653 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1654 u32 type, u32 mask) 1655 { 1656 struct crypto_aead *tfm; 1657 int err = 0; 1658 1659 tfm = crypto_alloc_aead(driver, type, mask); 1660 if (IS_ERR(tfm)) { 1661 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1662 "%ld\n", driver, PTR_ERR(tfm)); 1663 return PTR_ERR(tfm); 1664 } 1665 1666 if (desc->suite.aead.enc.vecs) { 1667 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1668 desc->suite.aead.enc.count); 1669 if (err) 1670 goto out; 1671 } 1672 1673 if (!err && desc->suite.aead.dec.vecs) 1674 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1675 desc->suite.aead.dec.count); 1676 1677 out: 1678 crypto_free_aead(tfm); 1679 return err; 1680 } 1681 1682 static int alg_test_cipher(const struct alg_test_desc *desc, 1683 const char *driver, u32 type, u32 mask) 1684 { 1685 struct crypto_cipher *tfm; 1686 int err = 0; 1687 1688 tfm = crypto_alloc_cipher(driver, type, mask); 1689 if (IS_ERR(tfm)) { 1690 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1691 "%s: %ld\n", driver, PTR_ERR(tfm)); 1692 return PTR_ERR(tfm); 1693 } 1694 1695 if (desc->suite.cipher.enc.vecs) { 1696 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1697 desc->suite.cipher.enc.count); 1698 if (err) 1699 goto out; 1700 } 1701 1702 if (desc->suite.cipher.dec.vecs) 1703 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1704 desc->suite.cipher.dec.count); 1705 1706 out: 1707 crypto_free_cipher(tfm); 1708 return err; 1709 } 1710 1711 static int alg_test_skcipher(const struct alg_test_desc *desc, 1712 const char *driver, u32 type, u32 mask) 1713 { 1714 struct crypto_skcipher *tfm; 1715 int err = 0; 1716 1717 tfm = crypto_alloc_skcipher(driver, type, mask); 1718 if (IS_ERR(tfm)) { 1719 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1720 "%s: %ld\n", driver, PTR_ERR(tfm)); 1721 return PTR_ERR(tfm); 1722 } 1723 1724 if (desc->suite.cipher.enc.vecs) { 1725 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1726 desc->suite.cipher.enc.count); 1727 if (err) 1728 goto out; 1729 } 1730 1731 if (desc->suite.cipher.dec.vecs) 1732 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1733 desc->suite.cipher.dec.count); 1734 1735 out: 1736 crypto_free_skcipher(tfm); 1737 return err; 1738 } 1739 1740 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1741 u32 type, u32 mask) 1742 { 1743 struct crypto_comp *comp; 1744 struct crypto_acomp *acomp; 1745 int err; 1746 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK; 1747 1748 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) { 1749 acomp = crypto_alloc_acomp(driver, type, mask); 1750 if (IS_ERR(acomp)) { 1751 pr_err("alg: acomp: Failed to load transform for %s: %ld\n", 1752 driver, PTR_ERR(acomp)); 1753 return PTR_ERR(acomp); 1754 } 1755 err = test_acomp(acomp, desc->suite.comp.comp.vecs, 1756 desc->suite.comp.decomp.vecs, 1757 desc->suite.comp.comp.count, 1758 desc->suite.comp.decomp.count); 1759 crypto_free_acomp(acomp); 1760 } else { 1761 comp = crypto_alloc_comp(driver, type, mask); 1762 if (IS_ERR(comp)) { 1763 pr_err("alg: comp: Failed to load transform for %s: %ld\n", 1764 driver, PTR_ERR(comp)); 1765 return PTR_ERR(comp); 1766 } 1767 1768 err = test_comp(comp, desc->suite.comp.comp.vecs, 1769 desc->suite.comp.decomp.vecs, 1770 desc->suite.comp.comp.count, 1771 desc->suite.comp.decomp.count); 1772 1773 crypto_free_comp(comp); 1774 } 1775 return err; 1776 } 1777 1778 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1779 u32 type, u32 mask) 1780 { 1781 struct crypto_ahash *tfm; 1782 int err; 1783 1784 tfm = crypto_alloc_ahash(driver, type, mask); 1785 if (IS_ERR(tfm)) { 1786 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1787 "%ld\n", driver, PTR_ERR(tfm)); 1788 return PTR_ERR(tfm); 1789 } 1790 1791 err = test_hash(tfm, desc->suite.hash.vecs, 1792 desc->suite.hash.count, true); 1793 if (!err) 1794 err = test_hash(tfm, desc->suite.hash.vecs, 1795 desc->suite.hash.count, false); 1796 1797 crypto_free_ahash(tfm); 1798 return err; 1799 } 1800 1801 static int alg_test_crc32c(const struct alg_test_desc *desc, 1802 const char *driver, u32 type, u32 mask) 1803 { 1804 struct crypto_shash *tfm; 1805 u32 val; 1806 int err; 1807 1808 err = alg_test_hash(desc, driver, type, mask); 1809 if (err) 1810 goto out; 1811 1812 tfm = crypto_alloc_shash(driver, type, mask); 1813 if (IS_ERR(tfm)) { 1814 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1815 "%ld\n", driver, PTR_ERR(tfm)); 1816 err = PTR_ERR(tfm); 1817 goto out; 1818 } 1819 1820 do { 1821 SHASH_DESC_ON_STACK(shash, tfm); 1822 u32 *ctx = (u32 *)shash_desc_ctx(shash); 1823 1824 shash->tfm = tfm; 1825 shash->flags = 0; 1826 1827 *ctx = le32_to_cpu(420553207); 1828 err = crypto_shash_final(shash, (u8 *)&val); 1829 if (err) { 1830 printk(KERN_ERR "alg: crc32c: Operation failed for " 1831 "%s: %d\n", driver, err); 1832 break; 1833 } 1834 1835 if (val != ~420553207) { 1836 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1837 "%d\n", driver, val); 1838 err = -EINVAL; 1839 } 1840 } while (0); 1841 1842 crypto_free_shash(tfm); 1843 1844 out: 1845 return err; 1846 } 1847 1848 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1849 u32 type, u32 mask) 1850 { 1851 struct crypto_rng *rng; 1852 int err; 1853 1854 rng = crypto_alloc_rng(driver, type, mask); 1855 if (IS_ERR(rng)) { 1856 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1857 "%ld\n", driver, PTR_ERR(rng)); 1858 return PTR_ERR(rng); 1859 } 1860 1861 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1862 1863 crypto_free_rng(rng); 1864 1865 return err; 1866 } 1867 1868 1869 static int drbg_cavs_test(struct drbg_testvec *test, int pr, 1870 const char *driver, u32 type, u32 mask) 1871 { 1872 int ret = -EAGAIN; 1873 struct crypto_rng *drng; 1874 struct drbg_test_data test_data; 1875 struct drbg_string addtl, pers, testentropy; 1876 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL); 1877 1878 if (!buf) 1879 return -ENOMEM; 1880 1881 drng = crypto_alloc_rng(driver, type, mask); 1882 if (IS_ERR(drng)) { 1883 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for " 1884 "%s\n", driver); 1885 kzfree(buf); 1886 return -ENOMEM; 1887 } 1888 1889 test_data.testentropy = &testentropy; 1890 drbg_string_fill(&testentropy, test->entropy, test->entropylen); 1891 drbg_string_fill(&pers, test->pers, test->perslen); 1892 ret = crypto_drbg_reset_test(drng, &pers, &test_data); 1893 if (ret) { 1894 printk(KERN_ERR "alg: drbg: Failed to reset rng\n"); 1895 goto outbuf; 1896 } 1897 1898 drbg_string_fill(&addtl, test->addtla, test->addtllen); 1899 if (pr) { 1900 drbg_string_fill(&testentropy, test->entpra, test->entprlen); 1901 ret = crypto_drbg_get_bytes_addtl_test(drng, 1902 buf, test->expectedlen, &addtl, &test_data); 1903 } else { 1904 ret = crypto_drbg_get_bytes_addtl(drng, 1905 buf, test->expectedlen, &addtl); 1906 } 1907 if (ret < 0) { 1908 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1909 "driver %s\n", driver); 1910 goto outbuf; 1911 } 1912 1913 drbg_string_fill(&addtl, test->addtlb, test->addtllen); 1914 if (pr) { 1915 drbg_string_fill(&testentropy, test->entprb, test->entprlen); 1916 ret = crypto_drbg_get_bytes_addtl_test(drng, 1917 buf, test->expectedlen, &addtl, &test_data); 1918 } else { 1919 ret = crypto_drbg_get_bytes_addtl(drng, 1920 buf, test->expectedlen, &addtl); 1921 } 1922 if (ret < 0) { 1923 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1924 "driver %s\n", driver); 1925 goto outbuf; 1926 } 1927 1928 ret = memcmp(test->expected, buf, test->expectedlen); 1929 1930 outbuf: 1931 crypto_free_rng(drng); 1932 kzfree(buf); 1933 return ret; 1934 } 1935 1936 1937 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver, 1938 u32 type, u32 mask) 1939 { 1940 int err = 0; 1941 int pr = 0; 1942 int i = 0; 1943 struct drbg_testvec *template = desc->suite.drbg.vecs; 1944 unsigned int tcount = desc->suite.drbg.count; 1945 1946 if (0 == memcmp(driver, "drbg_pr_", 8)) 1947 pr = 1; 1948 1949 for (i = 0; i < tcount; i++) { 1950 err = drbg_cavs_test(&template[i], pr, driver, type, mask); 1951 if (err) { 1952 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n", 1953 i, driver); 1954 err = -EINVAL; 1955 break; 1956 } 1957 } 1958 return err; 1959 1960 } 1961 1962 static int do_test_kpp(struct crypto_kpp *tfm, struct kpp_testvec *vec, 1963 const char *alg) 1964 { 1965 struct kpp_request *req; 1966 void *input_buf = NULL; 1967 void *output_buf = NULL; 1968 struct tcrypt_result result; 1969 unsigned int out_len_max; 1970 int err = -ENOMEM; 1971 struct scatterlist src, dst; 1972 1973 req = kpp_request_alloc(tfm, GFP_KERNEL); 1974 if (!req) 1975 return err; 1976 1977 init_completion(&result.completion); 1978 1979 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size); 1980 if (err < 0) 1981 goto free_req; 1982 1983 out_len_max = crypto_kpp_maxsize(tfm); 1984 output_buf = kzalloc(out_len_max, GFP_KERNEL); 1985 if (!output_buf) { 1986 err = -ENOMEM; 1987 goto free_req; 1988 } 1989 1990 /* Use appropriate parameter as base */ 1991 kpp_request_set_input(req, NULL, 0); 1992 sg_init_one(&dst, output_buf, out_len_max); 1993 kpp_request_set_output(req, &dst, out_len_max); 1994 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1995 tcrypt_complete, &result); 1996 1997 /* Compute public key */ 1998 err = wait_async_op(&result, crypto_kpp_generate_public_key(req)); 1999 if (err) { 2000 pr_err("alg: %s: generate public key test failed. err %d\n", 2001 alg, err); 2002 goto free_output; 2003 } 2004 /* Verify calculated public key */ 2005 if (memcmp(vec->expected_a_public, sg_virt(req->dst), 2006 vec->expected_a_public_size)) { 2007 pr_err("alg: %s: generate public key test failed. Invalid output\n", 2008 alg); 2009 err = -EINVAL; 2010 goto free_output; 2011 } 2012 2013 /* Calculate shared secret key by using counter part (b) public key. */ 2014 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL); 2015 if (!input_buf) { 2016 err = -ENOMEM; 2017 goto free_output; 2018 } 2019 2020 memcpy(input_buf, vec->b_public, vec->b_public_size); 2021 sg_init_one(&src, input_buf, vec->b_public_size); 2022 sg_init_one(&dst, output_buf, out_len_max); 2023 kpp_request_set_input(req, &src, vec->b_public_size); 2024 kpp_request_set_output(req, &dst, out_len_max); 2025 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2026 tcrypt_complete, &result); 2027 err = wait_async_op(&result, crypto_kpp_compute_shared_secret(req)); 2028 if (err) { 2029 pr_err("alg: %s: compute shard secret test failed. err %d\n", 2030 alg, err); 2031 goto free_all; 2032 } 2033 /* 2034 * verify shared secret from which the user will derive 2035 * secret key by executing whatever hash it has chosen 2036 */ 2037 if (memcmp(vec->expected_ss, sg_virt(req->dst), 2038 vec->expected_ss_size)) { 2039 pr_err("alg: %s: compute shared secret test failed. Invalid output\n", 2040 alg); 2041 err = -EINVAL; 2042 } 2043 2044 free_all: 2045 kfree(input_buf); 2046 free_output: 2047 kfree(output_buf); 2048 free_req: 2049 kpp_request_free(req); 2050 return err; 2051 } 2052 2053 static int test_kpp(struct crypto_kpp *tfm, const char *alg, 2054 struct kpp_testvec *vecs, unsigned int tcount) 2055 { 2056 int ret, i; 2057 2058 for (i = 0; i < tcount; i++) { 2059 ret = do_test_kpp(tfm, vecs++, alg); 2060 if (ret) { 2061 pr_err("alg: %s: test failed on vector %d, err=%d\n", 2062 alg, i + 1, ret); 2063 return ret; 2064 } 2065 } 2066 return 0; 2067 } 2068 2069 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver, 2070 u32 type, u32 mask) 2071 { 2072 struct crypto_kpp *tfm; 2073 int err = 0; 2074 2075 tfm = crypto_alloc_kpp(driver, type, mask); 2076 if (IS_ERR(tfm)) { 2077 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n", 2078 driver, PTR_ERR(tfm)); 2079 return PTR_ERR(tfm); 2080 } 2081 if (desc->suite.kpp.vecs) 2082 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs, 2083 desc->suite.kpp.count); 2084 2085 crypto_free_kpp(tfm); 2086 return err; 2087 } 2088 2089 static int test_akcipher_one(struct crypto_akcipher *tfm, 2090 struct akcipher_testvec *vecs) 2091 { 2092 char *xbuf[XBUFSIZE]; 2093 struct akcipher_request *req; 2094 void *outbuf_enc = NULL; 2095 void *outbuf_dec = NULL; 2096 struct tcrypt_result result; 2097 unsigned int out_len_max, out_len = 0; 2098 int err = -ENOMEM; 2099 struct scatterlist src, dst, src_tab[2]; 2100 2101 if (testmgr_alloc_buf(xbuf)) 2102 return err; 2103 2104 req = akcipher_request_alloc(tfm, GFP_KERNEL); 2105 if (!req) 2106 goto free_xbuf; 2107 2108 init_completion(&result.completion); 2109 2110 if (vecs->public_key_vec) 2111 err = crypto_akcipher_set_pub_key(tfm, vecs->key, 2112 vecs->key_len); 2113 else 2114 err = crypto_akcipher_set_priv_key(tfm, vecs->key, 2115 vecs->key_len); 2116 if (err) 2117 goto free_req; 2118 2119 err = -ENOMEM; 2120 out_len_max = crypto_akcipher_maxsize(tfm); 2121 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL); 2122 if (!outbuf_enc) 2123 goto free_req; 2124 2125 if (WARN_ON(vecs->m_size > PAGE_SIZE)) 2126 goto free_all; 2127 2128 memcpy(xbuf[0], vecs->m, vecs->m_size); 2129 2130 sg_init_table(src_tab, 2); 2131 sg_set_buf(&src_tab[0], xbuf[0], 8); 2132 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8); 2133 sg_init_one(&dst, outbuf_enc, out_len_max); 2134 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size, 2135 out_len_max); 2136 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2137 tcrypt_complete, &result); 2138 2139 /* Run RSA encrypt - c = m^e mod n;*/ 2140 err = wait_async_op(&result, crypto_akcipher_encrypt(req)); 2141 if (err) { 2142 pr_err("alg: akcipher: encrypt test failed. err %d\n", err); 2143 goto free_all; 2144 } 2145 if (req->dst_len != vecs->c_size) { 2146 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n"); 2147 err = -EINVAL; 2148 goto free_all; 2149 } 2150 /* verify that encrypted message is equal to expected */ 2151 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) { 2152 pr_err("alg: akcipher: encrypt test failed. Invalid output\n"); 2153 hexdump(outbuf_enc, vecs->c_size); 2154 err = -EINVAL; 2155 goto free_all; 2156 } 2157 /* Don't invoke decrypt for vectors with public key */ 2158 if (vecs->public_key_vec) { 2159 err = 0; 2160 goto free_all; 2161 } 2162 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL); 2163 if (!outbuf_dec) { 2164 err = -ENOMEM; 2165 goto free_all; 2166 } 2167 2168 if (WARN_ON(vecs->c_size > PAGE_SIZE)) 2169 goto free_all; 2170 2171 memcpy(xbuf[0], vecs->c, vecs->c_size); 2172 2173 sg_init_one(&src, xbuf[0], vecs->c_size); 2174 sg_init_one(&dst, outbuf_dec, out_len_max); 2175 init_completion(&result.completion); 2176 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max); 2177 2178 /* Run RSA decrypt - m = c^d mod n;*/ 2179 err = wait_async_op(&result, crypto_akcipher_decrypt(req)); 2180 if (err) { 2181 pr_err("alg: akcipher: decrypt test failed. err %d\n", err); 2182 goto free_all; 2183 } 2184 out_len = req->dst_len; 2185 if (out_len < vecs->m_size) { 2186 pr_err("alg: akcipher: decrypt test failed. " 2187 "Invalid output len %u\n", out_len); 2188 err = -EINVAL; 2189 goto free_all; 2190 } 2191 /* verify that decrypted message is equal to the original msg */ 2192 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) || 2193 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size, 2194 vecs->m_size)) { 2195 pr_err("alg: akcipher: decrypt test failed. Invalid output\n"); 2196 hexdump(outbuf_dec, out_len); 2197 err = -EINVAL; 2198 } 2199 free_all: 2200 kfree(outbuf_dec); 2201 kfree(outbuf_enc); 2202 free_req: 2203 akcipher_request_free(req); 2204 free_xbuf: 2205 testmgr_free_buf(xbuf); 2206 return err; 2207 } 2208 2209 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg, 2210 struct akcipher_testvec *vecs, unsigned int tcount) 2211 { 2212 const char *algo = 2213 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm)); 2214 int ret, i; 2215 2216 for (i = 0; i < tcount; i++) { 2217 ret = test_akcipher_one(tfm, vecs++); 2218 if (!ret) 2219 continue; 2220 2221 pr_err("alg: akcipher: test %d failed for %s, err=%d\n", 2222 i + 1, algo, ret); 2223 return ret; 2224 } 2225 return 0; 2226 } 2227 2228 static int alg_test_akcipher(const struct alg_test_desc *desc, 2229 const char *driver, u32 type, u32 mask) 2230 { 2231 struct crypto_akcipher *tfm; 2232 int err = 0; 2233 2234 tfm = crypto_alloc_akcipher(driver, type, mask); 2235 if (IS_ERR(tfm)) { 2236 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n", 2237 driver, PTR_ERR(tfm)); 2238 return PTR_ERR(tfm); 2239 } 2240 if (desc->suite.akcipher.vecs) 2241 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs, 2242 desc->suite.akcipher.count); 2243 2244 crypto_free_akcipher(tfm); 2245 return err; 2246 } 2247 2248 static int alg_test_null(const struct alg_test_desc *desc, 2249 const char *driver, u32 type, u32 mask) 2250 { 2251 return 0; 2252 } 2253 2254 /* Please keep this list sorted by algorithm name. */ 2255 static const struct alg_test_desc alg_test_descs[] = { 2256 { 2257 .alg = "ansi_cprng", 2258 .test = alg_test_cprng, 2259 .suite = { 2260 .cprng = { 2261 .vecs = ansi_cprng_aes_tv_template, 2262 .count = ANSI_CPRNG_AES_TEST_VECTORS 2263 } 2264 } 2265 }, { 2266 .alg = "authenc(hmac(md5),ecb(cipher_null))", 2267 .test = alg_test_aead, 2268 .suite = { 2269 .aead = { 2270 .enc = { 2271 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template, 2272 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS 2273 }, 2274 .dec = { 2275 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template, 2276 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS 2277 } 2278 } 2279 } 2280 }, { 2281 .alg = "authenc(hmac(sha1),cbc(aes))", 2282 .test = alg_test_aead, 2283 .suite = { 2284 .aead = { 2285 .enc = { 2286 .vecs = 2287 hmac_sha1_aes_cbc_enc_tv_temp, 2288 .count = 2289 HMAC_SHA1_AES_CBC_ENC_TEST_VEC 2290 } 2291 } 2292 } 2293 }, { 2294 .alg = "authenc(hmac(sha1),cbc(des))", 2295 .test = alg_test_aead, 2296 .suite = { 2297 .aead = { 2298 .enc = { 2299 .vecs = 2300 hmac_sha1_des_cbc_enc_tv_temp, 2301 .count = 2302 HMAC_SHA1_DES_CBC_ENC_TEST_VEC 2303 } 2304 } 2305 } 2306 }, { 2307 .alg = "authenc(hmac(sha1),cbc(des3_ede))", 2308 .test = alg_test_aead, 2309 .fips_allowed = 1, 2310 .suite = { 2311 .aead = { 2312 .enc = { 2313 .vecs = 2314 hmac_sha1_des3_ede_cbc_enc_tv_temp, 2315 .count = 2316 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC 2317 } 2318 } 2319 } 2320 }, { 2321 .alg = "authenc(hmac(sha1),ctr(aes))", 2322 .test = alg_test_null, 2323 .fips_allowed = 1, 2324 }, { 2325 .alg = "authenc(hmac(sha1),ecb(cipher_null))", 2326 .test = alg_test_aead, 2327 .suite = { 2328 .aead = { 2329 .enc = { 2330 .vecs = 2331 hmac_sha1_ecb_cipher_null_enc_tv_temp, 2332 .count = 2333 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC 2334 }, 2335 .dec = { 2336 .vecs = 2337 hmac_sha1_ecb_cipher_null_dec_tv_temp, 2338 .count = 2339 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC 2340 } 2341 } 2342 } 2343 }, { 2344 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))", 2345 .test = alg_test_null, 2346 .fips_allowed = 1, 2347 }, { 2348 .alg = "authenc(hmac(sha224),cbc(des))", 2349 .test = alg_test_aead, 2350 .suite = { 2351 .aead = { 2352 .enc = { 2353 .vecs = 2354 hmac_sha224_des_cbc_enc_tv_temp, 2355 .count = 2356 HMAC_SHA224_DES_CBC_ENC_TEST_VEC 2357 } 2358 } 2359 } 2360 }, { 2361 .alg = "authenc(hmac(sha224),cbc(des3_ede))", 2362 .test = alg_test_aead, 2363 .fips_allowed = 1, 2364 .suite = { 2365 .aead = { 2366 .enc = { 2367 .vecs = 2368 hmac_sha224_des3_ede_cbc_enc_tv_temp, 2369 .count = 2370 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC 2371 } 2372 } 2373 } 2374 }, { 2375 .alg = "authenc(hmac(sha256),cbc(aes))", 2376 .test = alg_test_aead, 2377 .fips_allowed = 1, 2378 .suite = { 2379 .aead = { 2380 .enc = { 2381 .vecs = 2382 hmac_sha256_aes_cbc_enc_tv_temp, 2383 .count = 2384 HMAC_SHA256_AES_CBC_ENC_TEST_VEC 2385 } 2386 } 2387 } 2388 }, { 2389 .alg = "authenc(hmac(sha256),cbc(des))", 2390 .test = alg_test_aead, 2391 .suite = { 2392 .aead = { 2393 .enc = { 2394 .vecs = 2395 hmac_sha256_des_cbc_enc_tv_temp, 2396 .count = 2397 HMAC_SHA256_DES_CBC_ENC_TEST_VEC 2398 } 2399 } 2400 } 2401 }, { 2402 .alg = "authenc(hmac(sha256),cbc(des3_ede))", 2403 .test = alg_test_aead, 2404 .fips_allowed = 1, 2405 .suite = { 2406 .aead = { 2407 .enc = { 2408 .vecs = 2409 hmac_sha256_des3_ede_cbc_enc_tv_temp, 2410 .count = 2411 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC 2412 } 2413 } 2414 } 2415 }, { 2416 .alg = "authenc(hmac(sha256),ctr(aes))", 2417 .test = alg_test_null, 2418 .fips_allowed = 1, 2419 }, { 2420 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))", 2421 .test = alg_test_null, 2422 .fips_allowed = 1, 2423 }, { 2424 .alg = "authenc(hmac(sha384),cbc(des))", 2425 .test = alg_test_aead, 2426 .suite = { 2427 .aead = { 2428 .enc = { 2429 .vecs = 2430 hmac_sha384_des_cbc_enc_tv_temp, 2431 .count = 2432 HMAC_SHA384_DES_CBC_ENC_TEST_VEC 2433 } 2434 } 2435 } 2436 }, { 2437 .alg = "authenc(hmac(sha384),cbc(des3_ede))", 2438 .test = alg_test_aead, 2439 .fips_allowed = 1, 2440 .suite = { 2441 .aead = { 2442 .enc = { 2443 .vecs = 2444 hmac_sha384_des3_ede_cbc_enc_tv_temp, 2445 .count = 2446 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC 2447 } 2448 } 2449 } 2450 }, { 2451 .alg = "authenc(hmac(sha384),ctr(aes))", 2452 .test = alg_test_null, 2453 .fips_allowed = 1, 2454 }, { 2455 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))", 2456 .test = alg_test_null, 2457 .fips_allowed = 1, 2458 }, { 2459 .alg = "authenc(hmac(sha512),cbc(aes))", 2460 .fips_allowed = 1, 2461 .test = alg_test_aead, 2462 .suite = { 2463 .aead = { 2464 .enc = { 2465 .vecs = 2466 hmac_sha512_aes_cbc_enc_tv_temp, 2467 .count = 2468 HMAC_SHA512_AES_CBC_ENC_TEST_VEC 2469 } 2470 } 2471 } 2472 }, { 2473 .alg = "authenc(hmac(sha512),cbc(des))", 2474 .test = alg_test_aead, 2475 .suite = { 2476 .aead = { 2477 .enc = { 2478 .vecs = 2479 hmac_sha512_des_cbc_enc_tv_temp, 2480 .count = 2481 HMAC_SHA512_DES_CBC_ENC_TEST_VEC 2482 } 2483 } 2484 } 2485 }, { 2486 .alg = "authenc(hmac(sha512),cbc(des3_ede))", 2487 .test = alg_test_aead, 2488 .fips_allowed = 1, 2489 .suite = { 2490 .aead = { 2491 .enc = { 2492 .vecs = 2493 hmac_sha512_des3_ede_cbc_enc_tv_temp, 2494 .count = 2495 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC 2496 } 2497 } 2498 } 2499 }, { 2500 .alg = "authenc(hmac(sha512),ctr(aes))", 2501 .test = alg_test_null, 2502 .fips_allowed = 1, 2503 }, { 2504 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))", 2505 .test = alg_test_null, 2506 .fips_allowed = 1, 2507 }, { 2508 .alg = "cbc(aes)", 2509 .test = alg_test_skcipher, 2510 .fips_allowed = 1, 2511 .suite = { 2512 .cipher = { 2513 .enc = { 2514 .vecs = aes_cbc_enc_tv_template, 2515 .count = AES_CBC_ENC_TEST_VECTORS 2516 }, 2517 .dec = { 2518 .vecs = aes_cbc_dec_tv_template, 2519 .count = AES_CBC_DEC_TEST_VECTORS 2520 } 2521 } 2522 } 2523 }, { 2524 .alg = "cbc(anubis)", 2525 .test = alg_test_skcipher, 2526 .suite = { 2527 .cipher = { 2528 .enc = { 2529 .vecs = anubis_cbc_enc_tv_template, 2530 .count = ANUBIS_CBC_ENC_TEST_VECTORS 2531 }, 2532 .dec = { 2533 .vecs = anubis_cbc_dec_tv_template, 2534 .count = ANUBIS_CBC_DEC_TEST_VECTORS 2535 } 2536 } 2537 } 2538 }, { 2539 .alg = "cbc(blowfish)", 2540 .test = alg_test_skcipher, 2541 .suite = { 2542 .cipher = { 2543 .enc = { 2544 .vecs = bf_cbc_enc_tv_template, 2545 .count = BF_CBC_ENC_TEST_VECTORS 2546 }, 2547 .dec = { 2548 .vecs = bf_cbc_dec_tv_template, 2549 .count = BF_CBC_DEC_TEST_VECTORS 2550 } 2551 } 2552 } 2553 }, { 2554 .alg = "cbc(camellia)", 2555 .test = alg_test_skcipher, 2556 .suite = { 2557 .cipher = { 2558 .enc = { 2559 .vecs = camellia_cbc_enc_tv_template, 2560 .count = CAMELLIA_CBC_ENC_TEST_VECTORS 2561 }, 2562 .dec = { 2563 .vecs = camellia_cbc_dec_tv_template, 2564 .count = CAMELLIA_CBC_DEC_TEST_VECTORS 2565 } 2566 } 2567 } 2568 }, { 2569 .alg = "cbc(cast5)", 2570 .test = alg_test_skcipher, 2571 .suite = { 2572 .cipher = { 2573 .enc = { 2574 .vecs = cast5_cbc_enc_tv_template, 2575 .count = CAST5_CBC_ENC_TEST_VECTORS 2576 }, 2577 .dec = { 2578 .vecs = cast5_cbc_dec_tv_template, 2579 .count = CAST5_CBC_DEC_TEST_VECTORS 2580 } 2581 } 2582 } 2583 }, { 2584 .alg = "cbc(cast6)", 2585 .test = alg_test_skcipher, 2586 .suite = { 2587 .cipher = { 2588 .enc = { 2589 .vecs = cast6_cbc_enc_tv_template, 2590 .count = CAST6_CBC_ENC_TEST_VECTORS 2591 }, 2592 .dec = { 2593 .vecs = cast6_cbc_dec_tv_template, 2594 .count = CAST6_CBC_DEC_TEST_VECTORS 2595 } 2596 } 2597 } 2598 }, { 2599 .alg = "cbc(des)", 2600 .test = alg_test_skcipher, 2601 .suite = { 2602 .cipher = { 2603 .enc = { 2604 .vecs = des_cbc_enc_tv_template, 2605 .count = DES_CBC_ENC_TEST_VECTORS 2606 }, 2607 .dec = { 2608 .vecs = des_cbc_dec_tv_template, 2609 .count = DES_CBC_DEC_TEST_VECTORS 2610 } 2611 } 2612 } 2613 }, { 2614 .alg = "cbc(des3_ede)", 2615 .test = alg_test_skcipher, 2616 .fips_allowed = 1, 2617 .suite = { 2618 .cipher = { 2619 .enc = { 2620 .vecs = des3_ede_cbc_enc_tv_template, 2621 .count = DES3_EDE_CBC_ENC_TEST_VECTORS 2622 }, 2623 .dec = { 2624 .vecs = des3_ede_cbc_dec_tv_template, 2625 .count = DES3_EDE_CBC_DEC_TEST_VECTORS 2626 } 2627 } 2628 } 2629 }, { 2630 .alg = "cbc(serpent)", 2631 .test = alg_test_skcipher, 2632 .suite = { 2633 .cipher = { 2634 .enc = { 2635 .vecs = serpent_cbc_enc_tv_template, 2636 .count = SERPENT_CBC_ENC_TEST_VECTORS 2637 }, 2638 .dec = { 2639 .vecs = serpent_cbc_dec_tv_template, 2640 .count = SERPENT_CBC_DEC_TEST_VECTORS 2641 } 2642 } 2643 } 2644 }, { 2645 .alg = "cbc(twofish)", 2646 .test = alg_test_skcipher, 2647 .suite = { 2648 .cipher = { 2649 .enc = { 2650 .vecs = tf_cbc_enc_tv_template, 2651 .count = TF_CBC_ENC_TEST_VECTORS 2652 }, 2653 .dec = { 2654 .vecs = tf_cbc_dec_tv_template, 2655 .count = TF_CBC_DEC_TEST_VECTORS 2656 } 2657 } 2658 } 2659 }, { 2660 .alg = "ccm(aes)", 2661 .test = alg_test_aead, 2662 .fips_allowed = 1, 2663 .suite = { 2664 .aead = { 2665 .enc = { 2666 .vecs = aes_ccm_enc_tv_template, 2667 .count = AES_CCM_ENC_TEST_VECTORS 2668 }, 2669 .dec = { 2670 .vecs = aes_ccm_dec_tv_template, 2671 .count = AES_CCM_DEC_TEST_VECTORS 2672 } 2673 } 2674 } 2675 }, { 2676 .alg = "chacha20", 2677 .test = alg_test_skcipher, 2678 .suite = { 2679 .cipher = { 2680 .enc = { 2681 .vecs = chacha20_enc_tv_template, 2682 .count = CHACHA20_ENC_TEST_VECTORS 2683 }, 2684 .dec = { 2685 .vecs = chacha20_enc_tv_template, 2686 .count = CHACHA20_ENC_TEST_VECTORS 2687 }, 2688 } 2689 } 2690 }, { 2691 .alg = "cmac(aes)", 2692 .fips_allowed = 1, 2693 .test = alg_test_hash, 2694 .suite = { 2695 .hash = { 2696 .vecs = aes_cmac128_tv_template, 2697 .count = CMAC_AES_TEST_VECTORS 2698 } 2699 } 2700 }, { 2701 .alg = "cmac(des3_ede)", 2702 .fips_allowed = 1, 2703 .test = alg_test_hash, 2704 .suite = { 2705 .hash = { 2706 .vecs = des3_ede_cmac64_tv_template, 2707 .count = CMAC_DES3_EDE_TEST_VECTORS 2708 } 2709 } 2710 }, { 2711 .alg = "compress_null", 2712 .test = alg_test_null, 2713 }, { 2714 .alg = "crc32", 2715 .test = alg_test_hash, 2716 .suite = { 2717 .hash = { 2718 .vecs = crc32_tv_template, 2719 .count = CRC32_TEST_VECTORS 2720 } 2721 } 2722 }, { 2723 .alg = "crc32c", 2724 .test = alg_test_crc32c, 2725 .fips_allowed = 1, 2726 .suite = { 2727 .hash = { 2728 .vecs = crc32c_tv_template, 2729 .count = CRC32C_TEST_VECTORS 2730 } 2731 } 2732 }, { 2733 .alg = "crct10dif", 2734 .test = alg_test_hash, 2735 .fips_allowed = 1, 2736 .suite = { 2737 .hash = { 2738 .vecs = crct10dif_tv_template, 2739 .count = CRCT10DIF_TEST_VECTORS 2740 } 2741 } 2742 }, { 2743 .alg = "ctr(aes)", 2744 .test = alg_test_skcipher, 2745 .fips_allowed = 1, 2746 .suite = { 2747 .cipher = { 2748 .enc = { 2749 .vecs = aes_ctr_enc_tv_template, 2750 .count = AES_CTR_ENC_TEST_VECTORS 2751 }, 2752 .dec = { 2753 .vecs = aes_ctr_dec_tv_template, 2754 .count = AES_CTR_DEC_TEST_VECTORS 2755 } 2756 } 2757 } 2758 }, { 2759 .alg = "ctr(blowfish)", 2760 .test = alg_test_skcipher, 2761 .suite = { 2762 .cipher = { 2763 .enc = { 2764 .vecs = bf_ctr_enc_tv_template, 2765 .count = BF_CTR_ENC_TEST_VECTORS 2766 }, 2767 .dec = { 2768 .vecs = bf_ctr_dec_tv_template, 2769 .count = BF_CTR_DEC_TEST_VECTORS 2770 } 2771 } 2772 } 2773 }, { 2774 .alg = "ctr(camellia)", 2775 .test = alg_test_skcipher, 2776 .suite = { 2777 .cipher = { 2778 .enc = { 2779 .vecs = camellia_ctr_enc_tv_template, 2780 .count = CAMELLIA_CTR_ENC_TEST_VECTORS 2781 }, 2782 .dec = { 2783 .vecs = camellia_ctr_dec_tv_template, 2784 .count = CAMELLIA_CTR_DEC_TEST_VECTORS 2785 } 2786 } 2787 } 2788 }, { 2789 .alg = "ctr(cast5)", 2790 .test = alg_test_skcipher, 2791 .suite = { 2792 .cipher = { 2793 .enc = { 2794 .vecs = cast5_ctr_enc_tv_template, 2795 .count = CAST5_CTR_ENC_TEST_VECTORS 2796 }, 2797 .dec = { 2798 .vecs = cast5_ctr_dec_tv_template, 2799 .count = CAST5_CTR_DEC_TEST_VECTORS 2800 } 2801 } 2802 } 2803 }, { 2804 .alg = "ctr(cast6)", 2805 .test = alg_test_skcipher, 2806 .suite = { 2807 .cipher = { 2808 .enc = { 2809 .vecs = cast6_ctr_enc_tv_template, 2810 .count = CAST6_CTR_ENC_TEST_VECTORS 2811 }, 2812 .dec = { 2813 .vecs = cast6_ctr_dec_tv_template, 2814 .count = CAST6_CTR_DEC_TEST_VECTORS 2815 } 2816 } 2817 } 2818 }, { 2819 .alg = "ctr(des)", 2820 .test = alg_test_skcipher, 2821 .suite = { 2822 .cipher = { 2823 .enc = { 2824 .vecs = des_ctr_enc_tv_template, 2825 .count = DES_CTR_ENC_TEST_VECTORS 2826 }, 2827 .dec = { 2828 .vecs = des_ctr_dec_tv_template, 2829 .count = DES_CTR_DEC_TEST_VECTORS 2830 } 2831 } 2832 } 2833 }, { 2834 .alg = "ctr(des3_ede)", 2835 .test = alg_test_skcipher, 2836 .suite = { 2837 .cipher = { 2838 .enc = { 2839 .vecs = des3_ede_ctr_enc_tv_template, 2840 .count = DES3_EDE_CTR_ENC_TEST_VECTORS 2841 }, 2842 .dec = { 2843 .vecs = des3_ede_ctr_dec_tv_template, 2844 .count = DES3_EDE_CTR_DEC_TEST_VECTORS 2845 } 2846 } 2847 } 2848 }, { 2849 .alg = "ctr(serpent)", 2850 .test = alg_test_skcipher, 2851 .suite = { 2852 .cipher = { 2853 .enc = { 2854 .vecs = serpent_ctr_enc_tv_template, 2855 .count = SERPENT_CTR_ENC_TEST_VECTORS 2856 }, 2857 .dec = { 2858 .vecs = serpent_ctr_dec_tv_template, 2859 .count = SERPENT_CTR_DEC_TEST_VECTORS 2860 } 2861 } 2862 } 2863 }, { 2864 .alg = "ctr(twofish)", 2865 .test = alg_test_skcipher, 2866 .suite = { 2867 .cipher = { 2868 .enc = { 2869 .vecs = tf_ctr_enc_tv_template, 2870 .count = TF_CTR_ENC_TEST_VECTORS 2871 }, 2872 .dec = { 2873 .vecs = tf_ctr_dec_tv_template, 2874 .count = TF_CTR_DEC_TEST_VECTORS 2875 } 2876 } 2877 } 2878 }, { 2879 .alg = "cts(cbc(aes))", 2880 .test = alg_test_skcipher, 2881 .suite = { 2882 .cipher = { 2883 .enc = { 2884 .vecs = cts_mode_enc_tv_template, 2885 .count = CTS_MODE_ENC_TEST_VECTORS 2886 }, 2887 .dec = { 2888 .vecs = cts_mode_dec_tv_template, 2889 .count = CTS_MODE_DEC_TEST_VECTORS 2890 } 2891 } 2892 } 2893 }, { 2894 .alg = "deflate", 2895 .test = alg_test_comp, 2896 .fips_allowed = 1, 2897 .suite = { 2898 .comp = { 2899 .comp = { 2900 .vecs = deflate_comp_tv_template, 2901 .count = DEFLATE_COMP_TEST_VECTORS 2902 }, 2903 .decomp = { 2904 .vecs = deflate_decomp_tv_template, 2905 .count = DEFLATE_DECOMP_TEST_VECTORS 2906 } 2907 } 2908 } 2909 }, { 2910 .alg = "dh", 2911 .test = alg_test_kpp, 2912 .fips_allowed = 1, 2913 .suite = { 2914 .kpp = { 2915 .vecs = dh_tv_template, 2916 .count = DH_TEST_VECTORS 2917 } 2918 } 2919 }, { 2920 .alg = "digest_null", 2921 .test = alg_test_null, 2922 }, { 2923 .alg = "drbg_nopr_ctr_aes128", 2924 .test = alg_test_drbg, 2925 .fips_allowed = 1, 2926 .suite = { 2927 .drbg = { 2928 .vecs = drbg_nopr_ctr_aes128_tv_template, 2929 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template) 2930 } 2931 } 2932 }, { 2933 .alg = "drbg_nopr_ctr_aes192", 2934 .test = alg_test_drbg, 2935 .fips_allowed = 1, 2936 .suite = { 2937 .drbg = { 2938 .vecs = drbg_nopr_ctr_aes192_tv_template, 2939 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template) 2940 } 2941 } 2942 }, { 2943 .alg = "drbg_nopr_ctr_aes256", 2944 .test = alg_test_drbg, 2945 .fips_allowed = 1, 2946 .suite = { 2947 .drbg = { 2948 .vecs = drbg_nopr_ctr_aes256_tv_template, 2949 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template) 2950 } 2951 } 2952 }, { 2953 /* 2954 * There is no need to specifically test the DRBG with every 2955 * backend cipher -- covered by drbg_nopr_hmac_sha256 test 2956 */ 2957 .alg = "drbg_nopr_hmac_sha1", 2958 .fips_allowed = 1, 2959 .test = alg_test_null, 2960 }, { 2961 .alg = "drbg_nopr_hmac_sha256", 2962 .test = alg_test_drbg, 2963 .fips_allowed = 1, 2964 .suite = { 2965 .drbg = { 2966 .vecs = drbg_nopr_hmac_sha256_tv_template, 2967 .count = 2968 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template) 2969 } 2970 } 2971 }, { 2972 /* covered by drbg_nopr_hmac_sha256 test */ 2973 .alg = "drbg_nopr_hmac_sha384", 2974 .fips_allowed = 1, 2975 .test = alg_test_null, 2976 }, { 2977 .alg = "drbg_nopr_hmac_sha512", 2978 .test = alg_test_null, 2979 .fips_allowed = 1, 2980 }, { 2981 .alg = "drbg_nopr_sha1", 2982 .fips_allowed = 1, 2983 .test = alg_test_null, 2984 }, { 2985 .alg = "drbg_nopr_sha256", 2986 .test = alg_test_drbg, 2987 .fips_allowed = 1, 2988 .suite = { 2989 .drbg = { 2990 .vecs = drbg_nopr_sha256_tv_template, 2991 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template) 2992 } 2993 } 2994 }, { 2995 /* covered by drbg_nopr_sha256 test */ 2996 .alg = "drbg_nopr_sha384", 2997 .fips_allowed = 1, 2998 .test = alg_test_null, 2999 }, { 3000 .alg = "drbg_nopr_sha512", 3001 .fips_allowed = 1, 3002 .test = alg_test_null, 3003 }, { 3004 .alg = "drbg_pr_ctr_aes128", 3005 .test = alg_test_drbg, 3006 .fips_allowed = 1, 3007 .suite = { 3008 .drbg = { 3009 .vecs = drbg_pr_ctr_aes128_tv_template, 3010 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template) 3011 } 3012 } 3013 }, { 3014 /* covered by drbg_pr_ctr_aes128 test */ 3015 .alg = "drbg_pr_ctr_aes192", 3016 .fips_allowed = 1, 3017 .test = alg_test_null, 3018 }, { 3019 .alg = "drbg_pr_ctr_aes256", 3020 .fips_allowed = 1, 3021 .test = alg_test_null, 3022 }, { 3023 .alg = "drbg_pr_hmac_sha1", 3024 .fips_allowed = 1, 3025 .test = alg_test_null, 3026 }, { 3027 .alg = "drbg_pr_hmac_sha256", 3028 .test = alg_test_drbg, 3029 .fips_allowed = 1, 3030 .suite = { 3031 .drbg = { 3032 .vecs = drbg_pr_hmac_sha256_tv_template, 3033 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template) 3034 } 3035 } 3036 }, { 3037 /* covered by drbg_pr_hmac_sha256 test */ 3038 .alg = "drbg_pr_hmac_sha384", 3039 .fips_allowed = 1, 3040 .test = alg_test_null, 3041 }, { 3042 .alg = "drbg_pr_hmac_sha512", 3043 .test = alg_test_null, 3044 .fips_allowed = 1, 3045 }, { 3046 .alg = "drbg_pr_sha1", 3047 .fips_allowed = 1, 3048 .test = alg_test_null, 3049 }, { 3050 .alg = "drbg_pr_sha256", 3051 .test = alg_test_drbg, 3052 .fips_allowed = 1, 3053 .suite = { 3054 .drbg = { 3055 .vecs = drbg_pr_sha256_tv_template, 3056 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template) 3057 } 3058 } 3059 }, { 3060 /* covered by drbg_pr_sha256 test */ 3061 .alg = "drbg_pr_sha384", 3062 .fips_allowed = 1, 3063 .test = alg_test_null, 3064 }, { 3065 .alg = "drbg_pr_sha512", 3066 .fips_allowed = 1, 3067 .test = alg_test_null, 3068 }, { 3069 .alg = "ecb(aes)", 3070 .test = alg_test_skcipher, 3071 .fips_allowed = 1, 3072 .suite = { 3073 .cipher = { 3074 .enc = { 3075 .vecs = aes_enc_tv_template, 3076 .count = AES_ENC_TEST_VECTORS 3077 }, 3078 .dec = { 3079 .vecs = aes_dec_tv_template, 3080 .count = AES_DEC_TEST_VECTORS 3081 } 3082 } 3083 } 3084 }, { 3085 .alg = "ecb(anubis)", 3086 .test = alg_test_skcipher, 3087 .suite = { 3088 .cipher = { 3089 .enc = { 3090 .vecs = anubis_enc_tv_template, 3091 .count = ANUBIS_ENC_TEST_VECTORS 3092 }, 3093 .dec = { 3094 .vecs = anubis_dec_tv_template, 3095 .count = ANUBIS_DEC_TEST_VECTORS 3096 } 3097 } 3098 } 3099 }, { 3100 .alg = "ecb(arc4)", 3101 .test = alg_test_skcipher, 3102 .suite = { 3103 .cipher = { 3104 .enc = { 3105 .vecs = arc4_enc_tv_template, 3106 .count = ARC4_ENC_TEST_VECTORS 3107 }, 3108 .dec = { 3109 .vecs = arc4_dec_tv_template, 3110 .count = ARC4_DEC_TEST_VECTORS 3111 } 3112 } 3113 } 3114 }, { 3115 .alg = "ecb(blowfish)", 3116 .test = alg_test_skcipher, 3117 .suite = { 3118 .cipher = { 3119 .enc = { 3120 .vecs = bf_enc_tv_template, 3121 .count = BF_ENC_TEST_VECTORS 3122 }, 3123 .dec = { 3124 .vecs = bf_dec_tv_template, 3125 .count = BF_DEC_TEST_VECTORS 3126 } 3127 } 3128 } 3129 }, { 3130 .alg = "ecb(camellia)", 3131 .test = alg_test_skcipher, 3132 .suite = { 3133 .cipher = { 3134 .enc = { 3135 .vecs = camellia_enc_tv_template, 3136 .count = CAMELLIA_ENC_TEST_VECTORS 3137 }, 3138 .dec = { 3139 .vecs = camellia_dec_tv_template, 3140 .count = CAMELLIA_DEC_TEST_VECTORS 3141 } 3142 } 3143 } 3144 }, { 3145 .alg = "ecb(cast5)", 3146 .test = alg_test_skcipher, 3147 .suite = { 3148 .cipher = { 3149 .enc = { 3150 .vecs = cast5_enc_tv_template, 3151 .count = CAST5_ENC_TEST_VECTORS 3152 }, 3153 .dec = { 3154 .vecs = cast5_dec_tv_template, 3155 .count = CAST5_DEC_TEST_VECTORS 3156 } 3157 } 3158 } 3159 }, { 3160 .alg = "ecb(cast6)", 3161 .test = alg_test_skcipher, 3162 .suite = { 3163 .cipher = { 3164 .enc = { 3165 .vecs = cast6_enc_tv_template, 3166 .count = CAST6_ENC_TEST_VECTORS 3167 }, 3168 .dec = { 3169 .vecs = cast6_dec_tv_template, 3170 .count = CAST6_DEC_TEST_VECTORS 3171 } 3172 } 3173 } 3174 }, { 3175 .alg = "ecb(cipher_null)", 3176 .test = alg_test_null, 3177 }, { 3178 .alg = "ecb(des)", 3179 .test = alg_test_skcipher, 3180 .suite = { 3181 .cipher = { 3182 .enc = { 3183 .vecs = des_enc_tv_template, 3184 .count = DES_ENC_TEST_VECTORS 3185 }, 3186 .dec = { 3187 .vecs = des_dec_tv_template, 3188 .count = DES_DEC_TEST_VECTORS 3189 } 3190 } 3191 } 3192 }, { 3193 .alg = "ecb(des3_ede)", 3194 .test = alg_test_skcipher, 3195 .fips_allowed = 1, 3196 .suite = { 3197 .cipher = { 3198 .enc = { 3199 .vecs = des3_ede_enc_tv_template, 3200 .count = DES3_EDE_ENC_TEST_VECTORS 3201 }, 3202 .dec = { 3203 .vecs = des3_ede_dec_tv_template, 3204 .count = DES3_EDE_DEC_TEST_VECTORS 3205 } 3206 } 3207 } 3208 }, { 3209 .alg = "ecb(fcrypt)", 3210 .test = alg_test_skcipher, 3211 .suite = { 3212 .cipher = { 3213 .enc = { 3214 .vecs = fcrypt_pcbc_enc_tv_template, 3215 .count = 1 3216 }, 3217 .dec = { 3218 .vecs = fcrypt_pcbc_dec_tv_template, 3219 .count = 1 3220 } 3221 } 3222 } 3223 }, { 3224 .alg = "ecb(khazad)", 3225 .test = alg_test_skcipher, 3226 .suite = { 3227 .cipher = { 3228 .enc = { 3229 .vecs = khazad_enc_tv_template, 3230 .count = KHAZAD_ENC_TEST_VECTORS 3231 }, 3232 .dec = { 3233 .vecs = khazad_dec_tv_template, 3234 .count = KHAZAD_DEC_TEST_VECTORS 3235 } 3236 } 3237 } 3238 }, { 3239 .alg = "ecb(seed)", 3240 .test = alg_test_skcipher, 3241 .suite = { 3242 .cipher = { 3243 .enc = { 3244 .vecs = seed_enc_tv_template, 3245 .count = SEED_ENC_TEST_VECTORS 3246 }, 3247 .dec = { 3248 .vecs = seed_dec_tv_template, 3249 .count = SEED_DEC_TEST_VECTORS 3250 } 3251 } 3252 } 3253 }, { 3254 .alg = "ecb(serpent)", 3255 .test = alg_test_skcipher, 3256 .suite = { 3257 .cipher = { 3258 .enc = { 3259 .vecs = serpent_enc_tv_template, 3260 .count = SERPENT_ENC_TEST_VECTORS 3261 }, 3262 .dec = { 3263 .vecs = serpent_dec_tv_template, 3264 .count = SERPENT_DEC_TEST_VECTORS 3265 } 3266 } 3267 } 3268 }, { 3269 .alg = "ecb(tea)", 3270 .test = alg_test_skcipher, 3271 .suite = { 3272 .cipher = { 3273 .enc = { 3274 .vecs = tea_enc_tv_template, 3275 .count = TEA_ENC_TEST_VECTORS 3276 }, 3277 .dec = { 3278 .vecs = tea_dec_tv_template, 3279 .count = TEA_DEC_TEST_VECTORS 3280 } 3281 } 3282 } 3283 }, { 3284 .alg = "ecb(tnepres)", 3285 .test = alg_test_skcipher, 3286 .suite = { 3287 .cipher = { 3288 .enc = { 3289 .vecs = tnepres_enc_tv_template, 3290 .count = TNEPRES_ENC_TEST_VECTORS 3291 }, 3292 .dec = { 3293 .vecs = tnepres_dec_tv_template, 3294 .count = TNEPRES_DEC_TEST_VECTORS 3295 } 3296 } 3297 } 3298 }, { 3299 .alg = "ecb(twofish)", 3300 .test = alg_test_skcipher, 3301 .suite = { 3302 .cipher = { 3303 .enc = { 3304 .vecs = tf_enc_tv_template, 3305 .count = TF_ENC_TEST_VECTORS 3306 }, 3307 .dec = { 3308 .vecs = tf_dec_tv_template, 3309 .count = TF_DEC_TEST_VECTORS 3310 } 3311 } 3312 } 3313 }, { 3314 .alg = "ecb(xeta)", 3315 .test = alg_test_skcipher, 3316 .suite = { 3317 .cipher = { 3318 .enc = { 3319 .vecs = xeta_enc_tv_template, 3320 .count = XETA_ENC_TEST_VECTORS 3321 }, 3322 .dec = { 3323 .vecs = xeta_dec_tv_template, 3324 .count = XETA_DEC_TEST_VECTORS 3325 } 3326 } 3327 } 3328 }, { 3329 .alg = "ecb(xtea)", 3330 .test = alg_test_skcipher, 3331 .suite = { 3332 .cipher = { 3333 .enc = { 3334 .vecs = xtea_enc_tv_template, 3335 .count = XTEA_ENC_TEST_VECTORS 3336 }, 3337 .dec = { 3338 .vecs = xtea_dec_tv_template, 3339 .count = XTEA_DEC_TEST_VECTORS 3340 } 3341 } 3342 } 3343 }, { 3344 .alg = "ecdh", 3345 .test = alg_test_kpp, 3346 .fips_allowed = 1, 3347 .suite = { 3348 .kpp = { 3349 .vecs = ecdh_tv_template, 3350 .count = ECDH_TEST_VECTORS 3351 } 3352 } 3353 }, { 3354 .alg = "gcm(aes)", 3355 .test = alg_test_aead, 3356 .fips_allowed = 1, 3357 .suite = { 3358 .aead = { 3359 .enc = { 3360 .vecs = aes_gcm_enc_tv_template, 3361 .count = AES_GCM_ENC_TEST_VECTORS 3362 }, 3363 .dec = { 3364 .vecs = aes_gcm_dec_tv_template, 3365 .count = AES_GCM_DEC_TEST_VECTORS 3366 } 3367 } 3368 } 3369 }, { 3370 .alg = "ghash", 3371 .test = alg_test_hash, 3372 .fips_allowed = 1, 3373 .suite = { 3374 .hash = { 3375 .vecs = ghash_tv_template, 3376 .count = GHASH_TEST_VECTORS 3377 } 3378 } 3379 }, { 3380 .alg = "hmac(crc32)", 3381 .test = alg_test_hash, 3382 .suite = { 3383 .hash = { 3384 .vecs = bfin_crc_tv_template, 3385 .count = BFIN_CRC_TEST_VECTORS 3386 } 3387 } 3388 }, { 3389 .alg = "hmac(md5)", 3390 .test = alg_test_hash, 3391 .suite = { 3392 .hash = { 3393 .vecs = hmac_md5_tv_template, 3394 .count = HMAC_MD5_TEST_VECTORS 3395 } 3396 } 3397 }, { 3398 .alg = "hmac(rmd128)", 3399 .test = alg_test_hash, 3400 .suite = { 3401 .hash = { 3402 .vecs = hmac_rmd128_tv_template, 3403 .count = HMAC_RMD128_TEST_VECTORS 3404 } 3405 } 3406 }, { 3407 .alg = "hmac(rmd160)", 3408 .test = alg_test_hash, 3409 .suite = { 3410 .hash = { 3411 .vecs = hmac_rmd160_tv_template, 3412 .count = HMAC_RMD160_TEST_VECTORS 3413 } 3414 } 3415 }, { 3416 .alg = "hmac(sha1)", 3417 .test = alg_test_hash, 3418 .fips_allowed = 1, 3419 .suite = { 3420 .hash = { 3421 .vecs = hmac_sha1_tv_template, 3422 .count = HMAC_SHA1_TEST_VECTORS 3423 } 3424 } 3425 }, { 3426 .alg = "hmac(sha224)", 3427 .test = alg_test_hash, 3428 .fips_allowed = 1, 3429 .suite = { 3430 .hash = { 3431 .vecs = hmac_sha224_tv_template, 3432 .count = HMAC_SHA224_TEST_VECTORS 3433 } 3434 } 3435 }, { 3436 .alg = "hmac(sha256)", 3437 .test = alg_test_hash, 3438 .fips_allowed = 1, 3439 .suite = { 3440 .hash = { 3441 .vecs = hmac_sha256_tv_template, 3442 .count = HMAC_SHA256_TEST_VECTORS 3443 } 3444 } 3445 }, { 3446 .alg = "hmac(sha3-224)", 3447 .test = alg_test_hash, 3448 .fips_allowed = 1, 3449 .suite = { 3450 .hash = { 3451 .vecs = hmac_sha3_224_tv_template, 3452 .count = HMAC_SHA3_224_TEST_VECTORS 3453 } 3454 } 3455 }, { 3456 .alg = "hmac(sha3-256)", 3457 .test = alg_test_hash, 3458 .fips_allowed = 1, 3459 .suite = { 3460 .hash = { 3461 .vecs = hmac_sha3_256_tv_template, 3462 .count = HMAC_SHA3_256_TEST_VECTORS 3463 } 3464 } 3465 }, { 3466 .alg = "hmac(sha3-384)", 3467 .test = alg_test_hash, 3468 .fips_allowed = 1, 3469 .suite = { 3470 .hash = { 3471 .vecs = hmac_sha3_384_tv_template, 3472 .count = HMAC_SHA3_384_TEST_VECTORS 3473 } 3474 } 3475 }, { 3476 .alg = "hmac(sha3-512)", 3477 .test = alg_test_hash, 3478 .fips_allowed = 1, 3479 .suite = { 3480 .hash = { 3481 .vecs = hmac_sha3_512_tv_template, 3482 .count = HMAC_SHA3_512_TEST_VECTORS 3483 } 3484 } 3485 }, { 3486 .alg = "hmac(sha384)", 3487 .test = alg_test_hash, 3488 .fips_allowed = 1, 3489 .suite = { 3490 .hash = { 3491 .vecs = hmac_sha384_tv_template, 3492 .count = HMAC_SHA384_TEST_VECTORS 3493 } 3494 } 3495 }, { 3496 .alg = "hmac(sha512)", 3497 .test = alg_test_hash, 3498 .fips_allowed = 1, 3499 .suite = { 3500 .hash = { 3501 .vecs = hmac_sha512_tv_template, 3502 .count = HMAC_SHA512_TEST_VECTORS 3503 } 3504 } 3505 }, { 3506 .alg = "jitterentropy_rng", 3507 .fips_allowed = 1, 3508 .test = alg_test_null, 3509 }, { 3510 .alg = "kw(aes)", 3511 .test = alg_test_skcipher, 3512 .fips_allowed = 1, 3513 .suite = { 3514 .cipher = { 3515 .enc = { 3516 .vecs = aes_kw_enc_tv_template, 3517 .count = ARRAY_SIZE(aes_kw_enc_tv_template) 3518 }, 3519 .dec = { 3520 .vecs = aes_kw_dec_tv_template, 3521 .count = ARRAY_SIZE(aes_kw_dec_tv_template) 3522 } 3523 } 3524 } 3525 }, { 3526 .alg = "lrw(aes)", 3527 .test = alg_test_skcipher, 3528 .suite = { 3529 .cipher = { 3530 .enc = { 3531 .vecs = aes_lrw_enc_tv_template, 3532 .count = AES_LRW_ENC_TEST_VECTORS 3533 }, 3534 .dec = { 3535 .vecs = aes_lrw_dec_tv_template, 3536 .count = AES_LRW_DEC_TEST_VECTORS 3537 } 3538 } 3539 } 3540 }, { 3541 .alg = "lrw(camellia)", 3542 .test = alg_test_skcipher, 3543 .suite = { 3544 .cipher = { 3545 .enc = { 3546 .vecs = camellia_lrw_enc_tv_template, 3547 .count = CAMELLIA_LRW_ENC_TEST_VECTORS 3548 }, 3549 .dec = { 3550 .vecs = camellia_lrw_dec_tv_template, 3551 .count = CAMELLIA_LRW_DEC_TEST_VECTORS 3552 } 3553 } 3554 } 3555 }, { 3556 .alg = "lrw(cast6)", 3557 .test = alg_test_skcipher, 3558 .suite = { 3559 .cipher = { 3560 .enc = { 3561 .vecs = cast6_lrw_enc_tv_template, 3562 .count = CAST6_LRW_ENC_TEST_VECTORS 3563 }, 3564 .dec = { 3565 .vecs = cast6_lrw_dec_tv_template, 3566 .count = CAST6_LRW_DEC_TEST_VECTORS 3567 } 3568 } 3569 } 3570 }, { 3571 .alg = "lrw(serpent)", 3572 .test = alg_test_skcipher, 3573 .suite = { 3574 .cipher = { 3575 .enc = { 3576 .vecs = serpent_lrw_enc_tv_template, 3577 .count = SERPENT_LRW_ENC_TEST_VECTORS 3578 }, 3579 .dec = { 3580 .vecs = serpent_lrw_dec_tv_template, 3581 .count = SERPENT_LRW_DEC_TEST_VECTORS 3582 } 3583 } 3584 } 3585 }, { 3586 .alg = "lrw(twofish)", 3587 .test = alg_test_skcipher, 3588 .suite = { 3589 .cipher = { 3590 .enc = { 3591 .vecs = tf_lrw_enc_tv_template, 3592 .count = TF_LRW_ENC_TEST_VECTORS 3593 }, 3594 .dec = { 3595 .vecs = tf_lrw_dec_tv_template, 3596 .count = TF_LRW_DEC_TEST_VECTORS 3597 } 3598 } 3599 } 3600 }, { 3601 .alg = "lz4", 3602 .test = alg_test_comp, 3603 .fips_allowed = 1, 3604 .suite = { 3605 .comp = { 3606 .comp = { 3607 .vecs = lz4_comp_tv_template, 3608 .count = LZ4_COMP_TEST_VECTORS 3609 }, 3610 .decomp = { 3611 .vecs = lz4_decomp_tv_template, 3612 .count = LZ4_DECOMP_TEST_VECTORS 3613 } 3614 } 3615 } 3616 }, { 3617 .alg = "lz4hc", 3618 .test = alg_test_comp, 3619 .fips_allowed = 1, 3620 .suite = { 3621 .comp = { 3622 .comp = { 3623 .vecs = lz4hc_comp_tv_template, 3624 .count = LZ4HC_COMP_TEST_VECTORS 3625 }, 3626 .decomp = { 3627 .vecs = lz4hc_decomp_tv_template, 3628 .count = LZ4HC_DECOMP_TEST_VECTORS 3629 } 3630 } 3631 } 3632 }, { 3633 .alg = "lzo", 3634 .test = alg_test_comp, 3635 .fips_allowed = 1, 3636 .suite = { 3637 .comp = { 3638 .comp = { 3639 .vecs = lzo_comp_tv_template, 3640 .count = LZO_COMP_TEST_VECTORS 3641 }, 3642 .decomp = { 3643 .vecs = lzo_decomp_tv_template, 3644 .count = LZO_DECOMP_TEST_VECTORS 3645 } 3646 } 3647 } 3648 }, { 3649 .alg = "md4", 3650 .test = alg_test_hash, 3651 .suite = { 3652 .hash = { 3653 .vecs = md4_tv_template, 3654 .count = MD4_TEST_VECTORS 3655 } 3656 } 3657 }, { 3658 .alg = "md5", 3659 .test = alg_test_hash, 3660 .suite = { 3661 .hash = { 3662 .vecs = md5_tv_template, 3663 .count = MD5_TEST_VECTORS 3664 } 3665 } 3666 }, { 3667 .alg = "michael_mic", 3668 .test = alg_test_hash, 3669 .suite = { 3670 .hash = { 3671 .vecs = michael_mic_tv_template, 3672 .count = MICHAEL_MIC_TEST_VECTORS 3673 } 3674 } 3675 }, { 3676 .alg = "ofb(aes)", 3677 .test = alg_test_skcipher, 3678 .fips_allowed = 1, 3679 .suite = { 3680 .cipher = { 3681 .enc = { 3682 .vecs = aes_ofb_enc_tv_template, 3683 .count = AES_OFB_ENC_TEST_VECTORS 3684 }, 3685 .dec = { 3686 .vecs = aes_ofb_dec_tv_template, 3687 .count = AES_OFB_DEC_TEST_VECTORS 3688 } 3689 } 3690 } 3691 }, { 3692 .alg = "pcbc(fcrypt)", 3693 .test = alg_test_skcipher, 3694 .suite = { 3695 .cipher = { 3696 .enc = { 3697 .vecs = fcrypt_pcbc_enc_tv_template, 3698 .count = FCRYPT_ENC_TEST_VECTORS 3699 }, 3700 .dec = { 3701 .vecs = fcrypt_pcbc_dec_tv_template, 3702 .count = FCRYPT_DEC_TEST_VECTORS 3703 } 3704 } 3705 } 3706 }, { 3707 .alg = "poly1305", 3708 .test = alg_test_hash, 3709 .suite = { 3710 .hash = { 3711 .vecs = poly1305_tv_template, 3712 .count = POLY1305_TEST_VECTORS 3713 } 3714 } 3715 }, { 3716 .alg = "rfc3686(ctr(aes))", 3717 .test = alg_test_skcipher, 3718 .fips_allowed = 1, 3719 .suite = { 3720 .cipher = { 3721 .enc = { 3722 .vecs = aes_ctr_rfc3686_enc_tv_template, 3723 .count = AES_CTR_3686_ENC_TEST_VECTORS 3724 }, 3725 .dec = { 3726 .vecs = aes_ctr_rfc3686_dec_tv_template, 3727 .count = AES_CTR_3686_DEC_TEST_VECTORS 3728 } 3729 } 3730 } 3731 }, { 3732 .alg = "rfc4106(gcm(aes))", 3733 .test = alg_test_aead, 3734 .fips_allowed = 1, 3735 .suite = { 3736 .aead = { 3737 .enc = { 3738 .vecs = aes_gcm_rfc4106_enc_tv_template, 3739 .count = AES_GCM_4106_ENC_TEST_VECTORS 3740 }, 3741 .dec = { 3742 .vecs = aes_gcm_rfc4106_dec_tv_template, 3743 .count = AES_GCM_4106_DEC_TEST_VECTORS 3744 } 3745 } 3746 } 3747 }, { 3748 .alg = "rfc4309(ccm(aes))", 3749 .test = alg_test_aead, 3750 .fips_allowed = 1, 3751 .suite = { 3752 .aead = { 3753 .enc = { 3754 .vecs = aes_ccm_rfc4309_enc_tv_template, 3755 .count = AES_CCM_4309_ENC_TEST_VECTORS 3756 }, 3757 .dec = { 3758 .vecs = aes_ccm_rfc4309_dec_tv_template, 3759 .count = AES_CCM_4309_DEC_TEST_VECTORS 3760 } 3761 } 3762 } 3763 }, { 3764 .alg = "rfc4543(gcm(aes))", 3765 .test = alg_test_aead, 3766 .suite = { 3767 .aead = { 3768 .enc = { 3769 .vecs = aes_gcm_rfc4543_enc_tv_template, 3770 .count = AES_GCM_4543_ENC_TEST_VECTORS 3771 }, 3772 .dec = { 3773 .vecs = aes_gcm_rfc4543_dec_tv_template, 3774 .count = AES_GCM_4543_DEC_TEST_VECTORS 3775 }, 3776 } 3777 } 3778 }, { 3779 .alg = "rfc7539(chacha20,poly1305)", 3780 .test = alg_test_aead, 3781 .suite = { 3782 .aead = { 3783 .enc = { 3784 .vecs = rfc7539_enc_tv_template, 3785 .count = RFC7539_ENC_TEST_VECTORS 3786 }, 3787 .dec = { 3788 .vecs = rfc7539_dec_tv_template, 3789 .count = RFC7539_DEC_TEST_VECTORS 3790 }, 3791 } 3792 } 3793 }, { 3794 .alg = "rfc7539esp(chacha20,poly1305)", 3795 .test = alg_test_aead, 3796 .suite = { 3797 .aead = { 3798 .enc = { 3799 .vecs = rfc7539esp_enc_tv_template, 3800 .count = RFC7539ESP_ENC_TEST_VECTORS 3801 }, 3802 .dec = { 3803 .vecs = rfc7539esp_dec_tv_template, 3804 .count = RFC7539ESP_DEC_TEST_VECTORS 3805 }, 3806 } 3807 } 3808 }, { 3809 .alg = "rmd128", 3810 .test = alg_test_hash, 3811 .suite = { 3812 .hash = { 3813 .vecs = rmd128_tv_template, 3814 .count = RMD128_TEST_VECTORS 3815 } 3816 } 3817 }, { 3818 .alg = "rmd160", 3819 .test = alg_test_hash, 3820 .suite = { 3821 .hash = { 3822 .vecs = rmd160_tv_template, 3823 .count = RMD160_TEST_VECTORS 3824 } 3825 } 3826 }, { 3827 .alg = "rmd256", 3828 .test = alg_test_hash, 3829 .suite = { 3830 .hash = { 3831 .vecs = rmd256_tv_template, 3832 .count = RMD256_TEST_VECTORS 3833 } 3834 } 3835 }, { 3836 .alg = "rmd320", 3837 .test = alg_test_hash, 3838 .suite = { 3839 .hash = { 3840 .vecs = rmd320_tv_template, 3841 .count = RMD320_TEST_VECTORS 3842 } 3843 } 3844 }, { 3845 .alg = "rsa", 3846 .test = alg_test_akcipher, 3847 .fips_allowed = 1, 3848 .suite = { 3849 .akcipher = { 3850 .vecs = rsa_tv_template, 3851 .count = RSA_TEST_VECTORS 3852 } 3853 } 3854 }, { 3855 .alg = "salsa20", 3856 .test = alg_test_skcipher, 3857 .suite = { 3858 .cipher = { 3859 .enc = { 3860 .vecs = salsa20_stream_enc_tv_template, 3861 .count = SALSA20_STREAM_ENC_TEST_VECTORS 3862 } 3863 } 3864 } 3865 }, { 3866 .alg = "sha1", 3867 .test = alg_test_hash, 3868 .fips_allowed = 1, 3869 .suite = { 3870 .hash = { 3871 .vecs = sha1_tv_template, 3872 .count = SHA1_TEST_VECTORS 3873 } 3874 } 3875 }, { 3876 .alg = "sha224", 3877 .test = alg_test_hash, 3878 .fips_allowed = 1, 3879 .suite = { 3880 .hash = { 3881 .vecs = sha224_tv_template, 3882 .count = SHA224_TEST_VECTORS 3883 } 3884 } 3885 }, { 3886 .alg = "sha256", 3887 .test = alg_test_hash, 3888 .fips_allowed = 1, 3889 .suite = { 3890 .hash = { 3891 .vecs = sha256_tv_template, 3892 .count = SHA256_TEST_VECTORS 3893 } 3894 } 3895 }, { 3896 .alg = "sha3-224", 3897 .test = alg_test_hash, 3898 .fips_allowed = 1, 3899 .suite = { 3900 .hash = { 3901 .vecs = sha3_224_tv_template, 3902 .count = SHA3_224_TEST_VECTORS 3903 } 3904 } 3905 }, { 3906 .alg = "sha3-256", 3907 .test = alg_test_hash, 3908 .fips_allowed = 1, 3909 .suite = { 3910 .hash = { 3911 .vecs = sha3_256_tv_template, 3912 .count = SHA3_256_TEST_VECTORS 3913 } 3914 } 3915 }, { 3916 .alg = "sha3-384", 3917 .test = alg_test_hash, 3918 .fips_allowed = 1, 3919 .suite = { 3920 .hash = { 3921 .vecs = sha3_384_tv_template, 3922 .count = SHA3_384_TEST_VECTORS 3923 } 3924 } 3925 }, { 3926 .alg = "sha3-512", 3927 .test = alg_test_hash, 3928 .fips_allowed = 1, 3929 .suite = { 3930 .hash = { 3931 .vecs = sha3_512_tv_template, 3932 .count = SHA3_512_TEST_VECTORS 3933 } 3934 } 3935 }, { 3936 .alg = "sha384", 3937 .test = alg_test_hash, 3938 .fips_allowed = 1, 3939 .suite = { 3940 .hash = { 3941 .vecs = sha384_tv_template, 3942 .count = SHA384_TEST_VECTORS 3943 } 3944 } 3945 }, { 3946 .alg = "sha512", 3947 .test = alg_test_hash, 3948 .fips_allowed = 1, 3949 .suite = { 3950 .hash = { 3951 .vecs = sha512_tv_template, 3952 .count = SHA512_TEST_VECTORS 3953 } 3954 } 3955 }, { 3956 .alg = "tgr128", 3957 .test = alg_test_hash, 3958 .suite = { 3959 .hash = { 3960 .vecs = tgr128_tv_template, 3961 .count = TGR128_TEST_VECTORS 3962 } 3963 } 3964 }, { 3965 .alg = "tgr160", 3966 .test = alg_test_hash, 3967 .suite = { 3968 .hash = { 3969 .vecs = tgr160_tv_template, 3970 .count = TGR160_TEST_VECTORS 3971 } 3972 } 3973 }, { 3974 .alg = "tgr192", 3975 .test = alg_test_hash, 3976 .suite = { 3977 .hash = { 3978 .vecs = tgr192_tv_template, 3979 .count = TGR192_TEST_VECTORS 3980 } 3981 } 3982 }, { 3983 .alg = "vmac(aes)", 3984 .test = alg_test_hash, 3985 .suite = { 3986 .hash = { 3987 .vecs = aes_vmac128_tv_template, 3988 .count = VMAC_AES_TEST_VECTORS 3989 } 3990 } 3991 }, { 3992 .alg = "wp256", 3993 .test = alg_test_hash, 3994 .suite = { 3995 .hash = { 3996 .vecs = wp256_tv_template, 3997 .count = WP256_TEST_VECTORS 3998 } 3999 } 4000 }, { 4001 .alg = "wp384", 4002 .test = alg_test_hash, 4003 .suite = { 4004 .hash = { 4005 .vecs = wp384_tv_template, 4006 .count = WP384_TEST_VECTORS 4007 } 4008 } 4009 }, { 4010 .alg = "wp512", 4011 .test = alg_test_hash, 4012 .suite = { 4013 .hash = { 4014 .vecs = wp512_tv_template, 4015 .count = WP512_TEST_VECTORS 4016 } 4017 } 4018 }, { 4019 .alg = "xcbc(aes)", 4020 .test = alg_test_hash, 4021 .suite = { 4022 .hash = { 4023 .vecs = aes_xcbc128_tv_template, 4024 .count = XCBC_AES_TEST_VECTORS 4025 } 4026 } 4027 }, { 4028 .alg = "xts(aes)", 4029 .test = alg_test_skcipher, 4030 .fips_allowed = 1, 4031 .suite = { 4032 .cipher = { 4033 .enc = { 4034 .vecs = aes_xts_enc_tv_template, 4035 .count = AES_XTS_ENC_TEST_VECTORS 4036 }, 4037 .dec = { 4038 .vecs = aes_xts_dec_tv_template, 4039 .count = AES_XTS_DEC_TEST_VECTORS 4040 } 4041 } 4042 } 4043 }, { 4044 .alg = "xts(camellia)", 4045 .test = alg_test_skcipher, 4046 .suite = { 4047 .cipher = { 4048 .enc = { 4049 .vecs = camellia_xts_enc_tv_template, 4050 .count = CAMELLIA_XTS_ENC_TEST_VECTORS 4051 }, 4052 .dec = { 4053 .vecs = camellia_xts_dec_tv_template, 4054 .count = CAMELLIA_XTS_DEC_TEST_VECTORS 4055 } 4056 } 4057 } 4058 }, { 4059 .alg = "xts(cast6)", 4060 .test = alg_test_skcipher, 4061 .suite = { 4062 .cipher = { 4063 .enc = { 4064 .vecs = cast6_xts_enc_tv_template, 4065 .count = CAST6_XTS_ENC_TEST_VECTORS 4066 }, 4067 .dec = { 4068 .vecs = cast6_xts_dec_tv_template, 4069 .count = CAST6_XTS_DEC_TEST_VECTORS 4070 } 4071 } 4072 } 4073 }, { 4074 .alg = "xts(serpent)", 4075 .test = alg_test_skcipher, 4076 .suite = { 4077 .cipher = { 4078 .enc = { 4079 .vecs = serpent_xts_enc_tv_template, 4080 .count = SERPENT_XTS_ENC_TEST_VECTORS 4081 }, 4082 .dec = { 4083 .vecs = serpent_xts_dec_tv_template, 4084 .count = SERPENT_XTS_DEC_TEST_VECTORS 4085 } 4086 } 4087 } 4088 }, { 4089 .alg = "xts(twofish)", 4090 .test = alg_test_skcipher, 4091 .suite = { 4092 .cipher = { 4093 .enc = { 4094 .vecs = tf_xts_enc_tv_template, 4095 .count = TF_XTS_ENC_TEST_VECTORS 4096 }, 4097 .dec = { 4098 .vecs = tf_xts_dec_tv_template, 4099 .count = TF_XTS_DEC_TEST_VECTORS 4100 } 4101 } 4102 } 4103 } 4104 }; 4105 4106 static bool alg_test_descs_checked; 4107 4108 static void alg_test_descs_check_order(void) 4109 { 4110 int i; 4111 4112 /* only check once */ 4113 if (alg_test_descs_checked) 4114 return; 4115 4116 alg_test_descs_checked = true; 4117 4118 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) { 4119 int diff = strcmp(alg_test_descs[i - 1].alg, 4120 alg_test_descs[i].alg); 4121 4122 if (WARN_ON(diff > 0)) { 4123 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n", 4124 alg_test_descs[i - 1].alg, 4125 alg_test_descs[i].alg); 4126 } 4127 4128 if (WARN_ON(diff == 0)) { 4129 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n", 4130 alg_test_descs[i].alg); 4131 } 4132 } 4133 } 4134 4135 static int alg_find_test(const char *alg) 4136 { 4137 int start = 0; 4138 int end = ARRAY_SIZE(alg_test_descs); 4139 4140 while (start < end) { 4141 int i = (start + end) / 2; 4142 int diff = strcmp(alg_test_descs[i].alg, alg); 4143 4144 if (diff > 0) { 4145 end = i; 4146 continue; 4147 } 4148 4149 if (diff < 0) { 4150 start = i + 1; 4151 continue; 4152 } 4153 4154 return i; 4155 } 4156 4157 return -1; 4158 } 4159 4160 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 4161 { 4162 int i; 4163 int j; 4164 int rc; 4165 4166 if (!fips_enabled && notests) { 4167 printk_once(KERN_INFO "alg: self-tests disabled\n"); 4168 return 0; 4169 } 4170 4171 alg_test_descs_check_order(); 4172 4173 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { 4174 char nalg[CRYPTO_MAX_ALG_NAME]; 4175 4176 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >= 4177 sizeof(nalg)) 4178 return -ENAMETOOLONG; 4179 4180 i = alg_find_test(nalg); 4181 if (i < 0) 4182 goto notest; 4183 4184 if (fips_enabled && !alg_test_descs[i].fips_allowed) 4185 goto non_fips_alg; 4186 4187 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask); 4188 goto test_done; 4189 } 4190 4191 i = alg_find_test(alg); 4192 j = alg_find_test(driver); 4193 if (i < 0 && j < 0) 4194 goto notest; 4195 4196 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) || 4197 (j >= 0 && !alg_test_descs[j].fips_allowed))) 4198 goto non_fips_alg; 4199 4200 rc = 0; 4201 if (i >= 0) 4202 rc |= alg_test_descs[i].test(alg_test_descs + i, driver, 4203 type, mask); 4204 if (j >= 0 && j != i) 4205 rc |= alg_test_descs[j].test(alg_test_descs + j, driver, 4206 type, mask); 4207 4208 test_done: 4209 if (fips_enabled && rc) 4210 panic("%s: %s alg self test failed in fips mode!\n", driver, alg); 4211 4212 if (fips_enabled && !rc) 4213 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg); 4214 4215 return rc; 4216 4217 notest: 4218 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver); 4219 return 0; 4220 non_fips_alg: 4221 return -EINVAL; 4222 } 4223 4224 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */ 4225 4226 EXPORT_SYMBOL_GPL(alg_test); 4227