1 /* 2 * Algorithm testing framework and tests. 3 * 4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> 6 * Copyright (c) 2007 Nokia Siemens Networks 7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> 8 * 9 * Updated RFC4106 AES-GCM testing. 10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com) 11 * Adrian Hoban <adrian.hoban@intel.com> 12 * Gabriele Paoloni <gabriele.paoloni@intel.com> 13 * Tadeusz Struk (tadeusz.struk@intel.com) 14 * Copyright (c) 2010, Intel Corporation. 15 * 16 * This program is free software; you can redistribute it and/or modify it 17 * under the terms of the GNU General Public License as published by the Free 18 * Software Foundation; either version 2 of the License, or (at your option) 19 * any later version. 20 * 21 */ 22 23 #include <crypto/aead.h> 24 #include <crypto/hash.h> 25 #include <crypto/skcipher.h> 26 #include <linux/err.h> 27 #include <linux/fips.h> 28 #include <linux/module.h> 29 #include <linux/scatterlist.h> 30 #include <linux/slab.h> 31 #include <linux/string.h> 32 #include <crypto/rng.h> 33 #include <crypto/drbg.h> 34 #include <crypto/akcipher.h> 35 #include <crypto/kpp.h> 36 #include <crypto/acompress.h> 37 38 #include "internal.h" 39 40 static bool notests; 41 module_param(notests, bool, 0644); 42 MODULE_PARM_DESC(notests, "disable crypto self-tests"); 43 44 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS 45 46 /* a perfect nop */ 47 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 48 { 49 return 0; 50 } 51 52 #else 53 54 #include "testmgr.h" 55 56 /* 57 * Need slab memory for testing (size in number of pages). 58 */ 59 #define XBUFSIZE 8 60 61 /* 62 * Indexes into the xbuf to simulate cross-page access. 63 */ 64 #define IDX1 32 65 #define IDX2 32400 66 #define IDX3 1511 67 #define IDX4 8193 68 #define IDX5 22222 69 #define IDX6 17101 70 #define IDX7 27333 71 #define IDX8 3000 72 73 /* 74 * Used by test_cipher() 75 */ 76 #define ENCRYPT 1 77 #define DECRYPT 0 78 79 struct aead_test_suite { 80 struct { 81 const struct aead_testvec *vecs; 82 unsigned int count; 83 } enc, dec; 84 }; 85 86 struct cipher_test_suite { 87 const struct cipher_testvec *vecs; 88 unsigned int count; 89 }; 90 91 struct comp_test_suite { 92 struct { 93 const struct comp_testvec *vecs; 94 unsigned int count; 95 } comp, decomp; 96 }; 97 98 struct hash_test_suite { 99 const struct hash_testvec *vecs; 100 unsigned int count; 101 }; 102 103 struct cprng_test_suite { 104 const struct cprng_testvec *vecs; 105 unsigned int count; 106 }; 107 108 struct drbg_test_suite { 109 const struct drbg_testvec *vecs; 110 unsigned int count; 111 }; 112 113 struct akcipher_test_suite { 114 const struct akcipher_testvec *vecs; 115 unsigned int count; 116 }; 117 118 struct kpp_test_suite { 119 const struct kpp_testvec *vecs; 120 unsigned int count; 121 }; 122 123 struct alg_test_desc { 124 const char *alg; 125 int (*test)(const struct alg_test_desc *desc, const char *driver, 126 u32 type, u32 mask); 127 int fips_allowed; /* set if alg is allowed in fips mode */ 128 129 union { 130 struct aead_test_suite aead; 131 struct cipher_test_suite cipher; 132 struct comp_test_suite comp; 133 struct hash_test_suite hash; 134 struct cprng_test_suite cprng; 135 struct drbg_test_suite drbg; 136 struct akcipher_test_suite akcipher; 137 struct kpp_test_suite kpp; 138 } suite; 139 }; 140 141 static const unsigned int IDX[8] = { 142 IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 }; 143 144 static void hexdump(unsigned char *buf, unsigned int len) 145 { 146 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET, 147 16, 1, 148 buf, len, false); 149 } 150 151 static int testmgr_alloc_buf(char *buf[XBUFSIZE]) 152 { 153 int i; 154 155 for (i = 0; i < XBUFSIZE; i++) { 156 buf[i] = (void *)__get_free_page(GFP_KERNEL); 157 if (!buf[i]) 158 goto err_free_buf; 159 } 160 161 return 0; 162 163 err_free_buf: 164 while (i-- > 0) 165 free_page((unsigned long)buf[i]); 166 167 return -ENOMEM; 168 } 169 170 static void testmgr_free_buf(char *buf[XBUFSIZE]) 171 { 172 int i; 173 174 for (i = 0; i < XBUFSIZE; i++) 175 free_page((unsigned long)buf[i]); 176 } 177 178 static int ahash_guard_result(char *result, char c, int size) 179 { 180 int i; 181 182 for (i = 0; i < size; i++) { 183 if (result[i] != c) 184 return -EINVAL; 185 } 186 187 return 0; 188 } 189 190 static int ahash_partial_update(struct ahash_request **preq, 191 struct crypto_ahash *tfm, const struct hash_testvec *template, 192 void *hash_buff, int k, int temp, struct scatterlist *sg, 193 const char *algo, char *result, struct crypto_wait *wait) 194 { 195 char *state; 196 struct ahash_request *req; 197 int statesize, ret = -EINVAL; 198 static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 }; 199 int digestsize = crypto_ahash_digestsize(tfm); 200 201 req = *preq; 202 statesize = crypto_ahash_statesize( 203 crypto_ahash_reqtfm(req)); 204 state = kmalloc(statesize + sizeof(guard), GFP_KERNEL); 205 if (!state) { 206 pr_err("alg: hash: Failed to alloc state for %s\n", algo); 207 goto out_nostate; 208 } 209 memcpy(state + statesize, guard, sizeof(guard)); 210 memset(result, 1, digestsize); 211 ret = crypto_ahash_export(req, state); 212 WARN_ON(memcmp(state + statesize, guard, sizeof(guard))); 213 if (ret) { 214 pr_err("alg: hash: Failed to export() for %s\n", algo); 215 goto out; 216 } 217 ret = ahash_guard_result(result, 1, digestsize); 218 if (ret) { 219 pr_err("alg: hash: Failed, export used req->result for %s\n", 220 algo); 221 goto out; 222 } 223 ahash_request_free(req); 224 req = ahash_request_alloc(tfm, GFP_KERNEL); 225 if (!req) { 226 pr_err("alg: hash: Failed to alloc request for %s\n", algo); 227 goto out_noreq; 228 } 229 ahash_request_set_callback(req, 230 CRYPTO_TFM_REQ_MAY_BACKLOG, 231 crypto_req_done, wait); 232 233 memcpy(hash_buff, template->plaintext + temp, 234 template->tap[k]); 235 sg_init_one(&sg[0], hash_buff, template->tap[k]); 236 ahash_request_set_crypt(req, sg, result, template->tap[k]); 237 ret = crypto_ahash_import(req, state); 238 if (ret) { 239 pr_err("alg: hash: Failed to import() for %s\n", algo); 240 goto out; 241 } 242 ret = ahash_guard_result(result, 1, digestsize); 243 if (ret) { 244 pr_err("alg: hash: Failed, import used req->result for %s\n", 245 algo); 246 goto out; 247 } 248 ret = crypto_wait_req(crypto_ahash_update(req), wait); 249 if (ret) 250 goto out; 251 *preq = req; 252 ret = 0; 253 goto out_noreq; 254 out: 255 ahash_request_free(req); 256 out_noreq: 257 kfree(state); 258 out_nostate: 259 return ret; 260 } 261 262 enum hash_test { 263 HASH_TEST_DIGEST, 264 HASH_TEST_FINAL, 265 HASH_TEST_FINUP 266 }; 267 268 static int __test_hash(struct crypto_ahash *tfm, 269 const struct hash_testvec *template, unsigned int tcount, 270 enum hash_test test_type, const int align_offset) 271 { 272 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 273 size_t digest_size = crypto_ahash_digestsize(tfm); 274 unsigned int i, j, k, temp; 275 struct scatterlist sg[8]; 276 char *result; 277 char *key; 278 struct ahash_request *req; 279 struct crypto_wait wait; 280 void *hash_buff; 281 char *xbuf[XBUFSIZE]; 282 int ret = -ENOMEM; 283 284 result = kmalloc(digest_size, GFP_KERNEL); 285 if (!result) 286 return ret; 287 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 288 if (!key) 289 goto out_nobuf; 290 if (testmgr_alloc_buf(xbuf)) 291 goto out_nobuf; 292 293 crypto_init_wait(&wait); 294 295 req = ahash_request_alloc(tfm, GFP_KERNEL); 296 if (!req) { 297 printk(KERN_ERR "alg: hash: Failed to allocate request for " 298 "%s\n", algo); 299 goto out_noreq; 300 } 301 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 302 crypto_req_done, &wait); 303 304 j = 0; 305 for (i = 0; i < tcount; i++) { 306 if (template[i].np) 307 continue; 308 309 ret = -EINVAL; 310 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) 311 goto out; 312 313 j++; 314 memset(result, 0, digest_size); 315 316 hash_buff = xbuf[0]; 317 hash_buff += align_offset; 318 319 memcpy(hash_buff, template[i].plaintext, template[i].psize); 320 sg_init_one(&sg[0], hash_buff, template[i].psize); 321 322 if (template[i].ksize) { 323 crypto_ahash_clear_flags(tfm, ~0); 324 if (template[i].ksize > MAX_KEYLEN) { 325 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 326 j, algo, template[i].ksize, MAX_KEYLEN); 327 ret = -EINVAL; 328 goto out; 329 } 330 memcpy(key, template[i].key, template[i].ksize); 331 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 332 if (ret) { 333 printk(KERN_ERR "alg: hash: setkey failed on " 334 "test %d for %s: ret=%d\n", j, algo, 335 -ret); 336 goto out; 337 } 338 } 339 340 ahash_request_set_crypt(req, sg, result, template[i].psize); 341 switch (test_type) { 342 case HASH_TEST_DIGEST: 343 ret = crypto_wait_req(crypto_ahash_digest(req), &wait); 344 if (ret) { 345 pr_err("alg: hash: digest failed on test %d " 346 "for %s: ret=%d\n", j, algo, -ret); 347 goto out; 348 } 349 break; 350 351 case HASH_TEST_FINAL: 352 memset(result, 1, digest_size); 353 ret = crypto_wait_req(crypto_ahash_init(req), &wait); 354 if (ret) { 355 pr_err("alg: hash: init failed on test %d " 356 "for %s: ret=%d\n", j, algo, -ret); 357 goto out; 358 } 359 ret = ahash_guard_result(result, 1, digest_size); 360 if (ret) { 361 pr_err("alg: hash: init failed on test %d " 362 "for %s: used req->result\n", j, algo); 363 goto out; 364 } 365 ret = crypto_wait_req(crypto_ahash_update(req), &wait); 366 if (ret) { 367 pr_err("alg: hash: update failed on test %d " 368 "for %s: ret=%d\n", j, algo, -ret); 369 goto out; 370 } 371 ret = ahash_guard_result(result, 1, digest_size); 372 if (ret) { 373 pr_err("alg: hash: update failed on test %d " 374 "for %s: used req->result\n", j, algo); 375 goto out; 376 } 377 ret = crypto_wait_req(crypto_ahash_final(req), &wait); 378 if (ret) { 379 pr_err("alg: hash: final failed on test %d " 380 "for %s: ret=%d\n", j, algo, -ret); 381 goto out; 382 } 383 break; 384 385 case HASH_TEST_FINUP: 386 memset(result, 1, digest_size); 387 ret = crypto_wait_req(crypto_ahash_init(req), &wait); 388 if (ret) { 389 pr_err("alg: hash: init failed on test %d " 390 "for %s: ret=%d\n", j, algo, -ret); 391 goto out; 392 } 393 ret = ahash_guard_result(result, 1, digest_size); 394 if (ret) { 395 pr_err("alg: hash: init failed on test %d " 396 "for %s: used req->result\n", j, algo); 397 goto out; 398 } 399 ret = crypto_wait_req(crypto_ahash_finup(req), &wait); 400 if (ret) { 401 pr_err("alg: hash: final failed on test %d " 402 "for %s: ret=%d\n", j, algo, -ret); 403 goto out; 404 } 405 break; 406 } 407 408 if (memcmp(result, template[i].digest, 409 crypto_ahash_digestsize(tfm))) { 410 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 411 j, algo); 412 hexdump(result, crypto_ahash_digestsize(tfm)); 413 ret = -EINVAL; 414 goto out; 415 } 416 } 417 418 if (test_type) 419 goto out; 420 421 j = 0; 422 for (i = 0; i < tcount; i++) { 423 /* alignment tests are only done with continuous buffers */ 424 if (align_offset != 0) 425 break; 426 427 if (!template[i].np) 428 continue; 429 430 j++; 431 memset(result, 0, digest_size); 432 433 temp = 0; 434 sg_init_table(sg, template[i].np); 435 ret = -EINVAL; 436 for (k = 0; k < template[i].np; k++) { 437 if (WARN_ON(offset_in_page(IDX[k]) + 438 template[i].tap[k] > PAGE_SIZE)) 439 goto out; 440 sg_set_buf(&sg[k], 441 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + 442 offset_in_page(IDX[k]), 443 template[i].plaintext + temp, 444 template[i].tap[k]), 445 template[i].tap[k]); 446 temp += template[i].tap[k]; 447 } 448 449 if (template[i].ksize) { 450 if (template[i].ksize > MAX_KEYLEN) { 451 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 452 j, algo, template[i].ksize, MAX_KEYLEN); 453 ret = -EINVAL; 454 goto out; 455 } 456 crypto_ahash_clear_flags(tfm, ~0); 457 memcpy(key, template[i].key, template[i].ksize); 458 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 459 460 if (ret) { 461 printk(KERN_ERR "alg: hash: setkey " 462 "failed on chunking test %d " 463 "for %s: ret=%d\n", j, algo, -ret); 464 goto out; 465 } 466 } 467 468 ahash_request_set_crypt(req, sg, result, template[i].psize); 469 ret = crypto_wait_req(crypto_ahash_digest(req), &wait); 470 if (ret) { 471 pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n", 472 j, algo, -ret); 473 goto out; 474 } 475 476 if (memcmp(result, template[i].digest, 477 crypto_ahash_digestsize(tfm))) { 478 printk(KERN_ERR "alg: hash: Chunking test %d " 479 "failed for %s\n", j, algo); 480 hexdump(result, crypto_ahash_digestsize(tfm)); 481 ret = -EINVAL; 482 goto out; 483 } 484 } 485 486 /* partial update exercise */ 487 j = 0; 488 for (i = 0; i < tcount; i++) { 489 /* alignment tests are only done with continuous buffers */ 490 if (align_offset != 0) 491 break; 492 493 if (template[i].np < 2) 494 continue; 495 496 j++; 497 memset(result, 0, digest_size); 498 499 ret = -EINVAL; 500 hash_buff = xbuf[0]; 501 memcpy(hash_buff, template[i].plaintext, 502 template[i].tap[0]); 503 sg_init_one(&sg[0], hash_buff, template[i].tap[0]); 504 505 if (template[i].ksize) { 506 crypto_ahash_clear_flags(tfm, ~0); 507 if (template[i].ksize > MAX_KEYLEN) { 508 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 509 j, algo, template[i].ksize, MAX_KEYLEN); 510 ret = -EINVAL; 511 goto out; 512 } 513 memcpy(key, template[i].key, template[i].ksize); 514 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 515 if (ret) { 516 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n", 517 j, algo, -ret); 518 goto out; 519 } 520 } 521 522 ahash_request_set_crypt(req, sg, result, template[i].tap[0]); 523 ret = crypto_wait_req(crypto_ahash_init(req), &wait); 524 if (ret) { 525 pr_err("alg: hash: init failed on test %d for %s: ret=%d\n", 526 j, algo, -ret); 527 goto out; 528 } 529 ret = crypto_wait_req(crypto_ahash_update(req), &wait); 530 if (ret) { 531 pr_err("alg: hash: update failed on test %d for %s: ret=%d\n", 532 j, algo, -ret); 533 goto out; 534 } 535 536 temp = template[i].tap[0]; 537 for (k = 1; k < template[i].np; k++) { 538 ret = ahash_partial_update(&req, tfm, &template[i], 539 hash_buff, k, temp, &sg[0], algo, result, 540 &wait); 541 if (ret) { 542 pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n", 543 j, algo, -ret); 544 goto out_noreq; 545 } 546 temp += template[i].tap[k]; 547 } 548 ret = crypto_wait_req(crypto_ahash_final(req), &wait); 549 if (ret) { 550 pr_err("alg: hash: final failed on test %d for %s: ret=%d\n", 551 j, algo, -ret); 552 goto out; 553 } 554 if (memcmp(result, template[i].digest, 555 crypto_ahash_digestsize(tfm))) { 556 pr_err("alg: hash: Partial Test %d failed for %s\n", 557 j, algo); 558 hexdump(result, crypto_ahash_digestsize(tfm)); 559 ret = -EINVAL; 560 goto out; 561 } 562 } 563 564 ret = 0; 565 566 out: 567 ahash_request_free(req); 568 out_noreq: 569 testmgr_free_buf(xbuf); 570 out_nobuf: 571 kfree(key); 572 kfree(result); 573 return ret; 574 } 575 576 static int test_hash(struct crypto_ahash *tfm, 577 const struct hash_testvec *template, 578 unsigned int tcount, enum hash_test test_type) 579 { 580 unsigned int alignmask; 581 int ret; 582 583 ret = __test_hash(tfm, template, tcount, test_type, 0); 584 if (ret) 585 return ret; 586 587 /* test unaligned buffers, check with one byte offset */ 588 ret = __test_hash(tfm, template, tcount, test_type, 1); 589 if (ret) 590 return ret; 591 592 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 593 if (alignmask) { 594 /* Check if alignment mask for tfm is correctly set. */ 595 ret = __test_hash(tfm, template, tcount, test_type, 596 alignmask + 1); 597 if (ret) 598 return ret; 599 } 600 601 return 0; 602 } 603 604 static int __test_aead(struct crypto_aead *tfm, int enc, 605 const struct aead_testvec *template, unsigned int tcount, 606 const bool diff_dst, const int align_offset) 607 { 608 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 609 unsigned int i, j, k, n, temp; 610 int ret = -ENOMEM; 611 char *q; 612 char *key; 613 struct aead_request *req; 614 struct scatterlist *sg; 615 struct scatterlist *sgout; 616 const char *e, *d; 617 struct crypto_wait wait; 618 unsigned int authsize, iv_len; 619 void *input; 620 void *output; 621 void *assoc; 622 char *iv; 623 char *xbuf[XBUFSIZE]; 624 char *xoutbuf[XBUFSIZE]; 625 char *axbuf[XBUFSIZE]; 626 627 iv = kzalloc(MAX_IVLEN, GFP_KERNEL); 628 if (!iv) 629 return ret; 630 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 631 if (!key) 632 goto out_noxbuf; 633 if (testmgr_alloc_buf(xbuf)) 634 goto out_noxbuf; 635 if (testmgr_alloc_buf(axbuf)) 636 goto out_noaxbuf; 637 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 638 goto out_nooutbuf; 639 640 /* avoid "the frame size is larger than 1024 bytes" compiler warning */ 641 sg = kmalloc(array3_size(sizeof(*sg), 8, (diff_dst ? 4 : 2)), 642 GFP_KERNEL); 643 if (!sg) 644 goto out_nosg; 645 sgout = &sg[16]; 646 647 if (diff_dst) 648 d = "-ddst"; 649 else 650 d = ""; 651 652 if (enc == ENCRYPT) 653 e = "encryption"; 654 else 655 e = "decryption"; 656 657 crypto_init_wait(&wait); 658 659 req = aead_request_alloc(tfm, GFP_KERNEL); 660 if (!req) { 661 pr_err("alg: aead%s: Failed to allocate request for %s\n", 662 d, algo); 663 goto out; 664 } 665 666 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 667 crypto_req_done, &wait); 668 669 iv_len = crypto_aead_ivsize(tfm); 670 671 for (i = 0, j = 0; i < tcount; i++) { 672 if (template[i].np) 673 continue; 674 675 j++; 676 677 /* some templates have no input data but they will 678 * touch input 679 */ 680 input = xbuf[0]; 681 input += align_offset; 682 assoc = axbuf[0]; 683 684 ret = -EINVAL; 685 if (WARN_ON(align_offset + template[i].ilen > 686 PAGE_SIZE || template[i].alen > PAGE_SIZE)) 687 goto out; 688 689 memcpy(input, template[i].input, template[i].ilen); 690 memcpy(assoc, template[i].assoc, template[i].alen); 691 if (template[i].iv) 692 memcpy(iv, template[i].iv, iv_len); 693 else 694 memset(iv, 0, iv_len); 695 696 crypto_aead_clear_flags(tfm, ~0); 697 if (template[i].wk) 698 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 699 700 if (template[i].klen > MAX_KEYLEN) { 701 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 702 d, j, algo, template[i].klen, 703 MAX_KEYLEN); 704 ret = -EINVAL; 705 goto out; 706 } 707 memcpy(key, template[i].key, template[i].klen); 708 709 ret = crypto_aead_setkey(tfm, key, template[i].klen); 710 if (template[i].fail == !ret) { 711 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n", 712 d, j, algo, crypto_aead_get_flags(tfm)); 713 goto out; 714 } else if (ret) 715 continue; 716 717 authsize = abs(template[i].rlen - template[i].ilen); 718 ret = crypto_aead_setauthsize(tfm, authsize); 719 if (ret) { 720 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n", 721 d, authsize, j, algo); 722 goto out; 723 } 724 725 k = !!template[i].alen; 726 sg_init_table(sg, k + 1); 727 sg_set_buf(&sg[0], assoc, template[i].alen); 728 sg_set_buf(&sg[k], input, 729 template[i].ilen + (enc ? authsize : 0)); 730 output = input; 731 732 if (diff_dst) { 733 sg_init_table(sgout, k + 1); 734 sg_set_buf(&sgout[0], assoc, template[i].alen); 735 736 output = xoutbuf[0]; 737 output += align_offset; 738 sg_set_buf(&sgout[k], output, 739 template[i].rlen + (enc ? 0 : authsize)); 740 } 741 742 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 743 template[i].ilen, iv); 744 745 aead_request_set_ad(req, template[i].alen); 746 747 ret = crypto_wait_req(enc ? crypto_aead_encrypt(req) 748 : crypto_aead_decrypt(req), &wait); 749 750 switch (ret) { 751 case 0: 752 if (template[i].novrfy) { 753 /* verification was supposed to fail */ 754 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n", 755 d, e, j, algo); 756 /* so really, we got a bad message */ 757 ret = -EBADMSG; 758 goto out; 759 } 760 break; 761 case -EBADMSG: 762 if (template[i].novrfy) 763 /* verification failure was expected */ 764 continue; 765 /* fall through */ 766 default: 767 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n", 768 d, e, j, algo, -ret); 769 goto out; 770 } 771 772 q = output; 773 if (memcmp(q, template[i].result, template[i].rlen)) { 774 pr_err("alg: aead%s: Test %d failed on %s for %s\n", 775 d, j, e, algo); 776 hexdump(q, template[i].rlen); 777 ret = -EINVAL; 778 goto out; 779 } 780 } 781 782 for (i = 0, j = 0; i < tcount; i++) { 783 /* alignment tests are only done with continuous buffers */ 784 if (align_offset != 0) 785 break; 786 787 if (!template[i].np) 788 continue; 789 790 j++; 791 792 if (template[i].iv) 793 memcpy(iv, template[i].iv, iv_len); 794 else 795 memset(iv, 0, MAX_IVLEN); 796 797 crypto_aead_clear_flags(tfm, ~0); 798 if (template[i].wk) 799 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 800 if (template[i].klen > MAX_KEYLEN) { 801 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n", 802 d, j, algo, template[i].klen, MAX_KEYLEN); 803 ret = -EINVAL; 804 goto out; 805 } 806 memcpy(key, template[i].key, template[i].klen); 807 808 ret = crypto_aead_setkey(tfm, key, template[i].klen); 809 if (template[i].fail == !ret) { 810 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n", 811 d, j, algo, crypto_aead_get_flags(tfm)); 812 goto out; 813 } else if (ret) 814 continue; 815 816 authsize = abs(template[i].rlen - template[i].ilen); 817 818 ret = -EINVAL; 819 sg_init_table(sg, template[i].anp + template[i].np); 820 if (diff_dst) 821 sg_init_table(sgout, template[i].anp + template[i].np); 822 823 ret = -EINVAL; 824 for (k = 0, temp = 0; k < template[i].anp; k++) { 825 if (WARN_ON(offset_in_page(IDX[k]) + 826 template[i].atap[k] > PAGE_SIZE)) 827 goto out; 828 sg_set_buf(&sg[k], 829 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 830 offset_in_page(IDX[k]), 831 template[i].assoc + temp, 832 template[i].atap[k]), 833 template[i].atap[k]); 834 if (diff_dst) 835 sg_set_buf(&sgout[k], 836 axbuf[IDX[k] >> PAGE_SHIFT] + 837 offset_in_page(IDX[k]), 838 template[i].atap[k]); 839 temp += template[i].atap[k]; 840 } 841 842 for (k = 0, temp = 0; k < template[i].np; k++) { 843 if (WARN_ON(offset_in_page(IDX[k]) + 844 template[i].tap[k] > PAGE_SIZE)) 845 goto out; 846 847 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 848 memcpy(q, template[i].input + temp, template[i].tap[k]); 849 sg_set_buf(&sg[template[i].anp + k], 850 q, template[i].tap[k]); 851 852 if (diff_dst) { 853 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 854 offset_in_page(IDX[k]); 855 856 memset(q, 0, template[i].tap[k]); 857 858 sg_set_buf(&sgout[template[i].anp + k], 859 q, template[i].tap[k]); 860 } 861 862 n = template[i].tap[k]; 863 if (k == template[i].np - 1 && enc) 864 n += authsize; 865 if (offset_in_page(q) + n < PAGE_SIZE) 866 q[n] = 0; 867 868 temp += template[i].tap[k]; 869 } 870 871 ret = crypto_aead_setauthsize(tfm, authsize); 872 if (ret) { 873 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n", 874 d, authsize, j, algo); 875 goto out; 876 } 877 878 if (enc) { 879 if (WARN_ON(sg[template[i].anp + k - 1].offset + 880 sg[template[i].anp + k - 1].length + 881 authsize > PAGE_SIZE)) { 882 ret = -EINVAL; 883 goto out; 884 } 885 886 if (diff_dst) 887 sgout[template[i].anp + k - 1].length += 888 authsize; 889 sg[template[i].anp + k - 1].length += authsize; 890 } 891 892 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 893 template[i].ilen, 894 iv); 895 896 aead_request_set_ad(req, template[i].alen); 897 898 ret = crypto_wait_req(enc ? crypto_aead_encrypt(req) 899 : crypto_aead_decrypt(req), &wait); 900 901 switch (ret) { 902 case 0: 903 if (template[i].novrfy) { 904 /* verification was supposed to fail */ 905 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n", 906 d, e, j, algo); 907 /* so really, we got a bad message */ 908 ret = -EBADMSG; 909 goto out; 910 } 911 break; 912 case -EBADMSG: 913 if (template[i].novrfy) 914 /* verification failure was expected */ 915 continue; 916 /* fall through */ 917 default: 918 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n", 919 d, e, j, algo, -ret); 920 goto out; 921 } 922 923 ret = -EINVAL; 924 for (k = 0, temp = 0; k < template[i].np; k++) { 925 if (diff_dst) 926 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 927 offset_in_page(IDX[k]); 928 else 929 q = xbuf[IDX[k] >> PAGE_SHIFT] + 930 offset_in_page(IDX[k]); 931 932 n = template[i].tap[k]; 933 if (k == template[i].np - 1) 934 n += enc ? authsize : -authsize; 935 936 if (memcmp(q, template[i].result + temp, n)) { 937 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n", 938 d, j, e, k, algo); 939 hexdump(q, n); 940 goto out; 941 } 942 943 q += n; 944 if (k == template[i].np - 1 && !enc) { 945 if (!diff_dst && 946 memcmp(q, template[i].input + 947 temp + n, authsize)) 948 n = authsize; 949 else 950 n = 0; 951 } else { 952 for (n = 0; offset_in_page(q + n) && q[n]; n++) 953 ; 954 } 955 if (n) { 956 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 957 d, j, e, k, algo, n); 958 hexdump(q, n); 959 goto out; 960 } 961 962 temp += template[i].tap[k]; 963 } 964 } 965 966 ret = 0; 967 968 out: 969 aead_request_free(req); 970 kfree(sg); 971 out_nosg: 972 if (diff_dst) 973 testmgr_free_buf(xoutbuf); 974 out_nooutbuf: 975 testmgr_free_buf(axbuf); 976 out_noaxbuf: 977 testmgr_free_buf(xbuf); 978 out_noxbuf: 979 kfree(key); 980 kfree(iv); 981 return ret; 982 } 983 984 static int test_aead(struct crypto_aead *tfm, int enc, 985 const struct aead_testvec *template, unsigned int tcount) 986 { 987 unsigned int alignmask; 988 int ret; 989 990 /* test 'dst == src' case */ 991 ret = __test_aead(tfm, enc, template, tcount, false, 0); 992 if (ret) 993 return ret; 994 995 /* test 'dst != src' case */ 996 ret = __test_aead(tfm, enc, template, tcount, true, 0); 997 if (ret) 998 return ret; 999 1000 /* test unaligned buffers, check with one byte offset */ 1001 ret = __test_aead(tfm, enc, template, tcount, true, 1); 1002 if (ret) 1003 return ret; 1004 1005 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1006 if (alignmask) { 1007 /* Check if alignment mask for tfm is correctly set. */ 1008 ret = __test_aead(tfm, enc, template, tcount, true, 1009 alignmask + 1); 1010 if (ret) 1011 return ret; 1012 } 1013 1014 return 0; 1015 } 1016 1017 static int test_cipher(struct crypto_cipher *tfm, int enc, 1018 const struct cipher_testvec *template, 1019 unsigned int tcount) 1020 { 1021 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 1022 unsigned int i, j, k; 1023 char *q; 1024 const char *e; 1025 const char *input, *result; 1026 void *data; 1027 char *xbuf[XBUFSIZE]; 1028 int ret = -ENOMEM; 1029 1030 if (testmgr_alloc_buf(xbuf)) 1031 goto out_nobuf; 1032 1033 if (enc == ENCRYPT) 1034 e = "encryption"; 1035 else 1036 e = "decryption"; 1037 1038 j = 0; 1039 for (i = 0; i < tcount; i++) { 1040 if (template[i].np) 1041 continue; 1042 1043 if (fips_enabled && template[i].fips_skip) 1044 continue; 1045 1046 input = enc ? template[i].ptext : template[i].ctext; 1047 result = enc ? template[i].ctext : template[i].ptext; 1048 j++; 1049 1050 ret = -EINVAL; 1051 if (WARN_ON(template[i].len > PAGE_SIZE)) 1052 goto out; 1053 1054 data = xbuf[0]; 1055 memcpy(data, input, template[i].len); 1056 1057 crypto_cipher_clear_flags(tfm, ~0); 1058 if (template[i].wk) 1059 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 1060 1061 ret = crypto_cipher_setkey(tfm, template[i].key, 1062 template[i].klen); 1063 if (template[i].fail == !ret) { 1064 printk(KERN_ERR "alg: cipher: setkey failed " 1065 "on test %d for %s: flags=%x\n", j, 1066 algo, crypto_cipher_get_flags(tfm)); 1067 goto out; 1068 } else if (ret) 1069 continue; 1070 1071 for (k = 0; k < template[i].len; 1072 k += crypto_cipher_blocksize(tfm)) { 1073 if (enc) 1074 crypto_cipher_encrypt_one(tfm, data + k, 1075 data + k); 1076 else 1077 crypto_cipher_decrypt_one(tfm, data + k, 1078 data + k); 1079 } 1080 1081 q = data; 1082 if (memcmp(q, result, template[i].len)) { 1083 printk(KERN_ERR "alg: cipher: Test %d failed " 1084 "on %s for %s\n", j, e, algo); 1085 hexdump(q, template[i].len); 1086 ret = -EINVAL; 1087 goto out; 1088 } 1089 } 1090 1091 ret = 0; 1092 1093 out: 1094 testmgr_free_buf(xbuf); 1095 out_nobuf: 1096 return ret; 1097 } 1098 1099 static int __test_skcipher(struct crypto_skcipher *tfm, int enc, 1100 const struct cipher_testvec *template, 1101 unsigned int tcount, 1102 const bool diff_dst, const int align_offset) 1103 { 1104 const char *algo = 1105 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)); 1106 unsigned int i, j, k, n, temp; 1107 char *q; 1108 struct skcipher_request *req; 1109 struct scatterlist sg[8]; 1110 struct scatterlist sgout[8]; 1111 const char *e, *d; 1112 struct crypto_wait wait; 1113 const char *input, *result; 1114 void *data; 1115 char iv[MAX_IVLEN]; 1116 char *xbuf[XBUFSIZE]; 1117 char *xoutbuf[XBUFSIZE]; 1118 int ret = -ENOMEM; 1119 unsigned int ivsize = crypto_skcipher_ivsize(tfm); 1120 1121 if (testmgr_alloc_buf(xbuf)) 1122 goto out_nobuf; 1123 1124 if (diff_dst && testmgr_alloc_buf(xoutbuf)) 1125 goto out_nooutbuf; 1126 1127 if (diff_dst) 1128 d = "-ddst"; 1129 else 1130 d = ""; 1131 1132 if (enc == ENCRYPT) 1133 e = "encryption"; 1134 else 1135 e = "decryption"; 1136 1137 crypto_init_wait(&wait); 1138 1139 req = skcipher_request_alloc(tfm, GFP_KERNEL); 1140 if (!req) { 1141 pr_err("alg: skcipher%s: Failed to allocate request for %s\n", 1142 d, algo); 1143 goto out; 1144 } 1145 1146 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1147 crypto_req_done, &wait); 1148 1149 j = 0; 1150 for (i = 0; i < tcount; i++) { 1151 if (template[i].np && !template[i].also_non_np) 1152 continue; 1153 1154 if (fips_enabled && template[i].fips_skip) 1155 continue; 1156 1157 if (template[i].iv && !(template[i].generates_iv && enc)) 1158 memcpy(iv, template[i].iv, ivsize); 1159 else 1160 memset(iv, 0, MAX_IVLEN); 1161 1162 input = enc ? template[i].ptext : template[i].ctext; 1163 result = enc ? template[i].ctext : template[i].ptext; 1164 j++; 1165 ret = -EINVAL; 1166 if (WARN_ON(align_offset + template[i].len > PAGE_SIZE)) 1167 goto out; 1168 1169 data = xbuf[0]; 1170 data += align_offset; 1171 memcpy(data, input, template[i].len); 1172 1173 crypto_skcipher_clear_flags(tfm, ~0); 1174 if (template[i].wk) 1175 crypto_skcipher_set_flags(tfm, 1176 CRYPTO_TFM_REQ_WEAK_KEY); 1177 1178 ret = crypto_skcipher_setkey(tfm, template[i].key, 1179 template[i].klen); 1180 if (template[i].fail == !ret) { 1181 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n", 1182 d, j, algo, crypto_skcipher_get_flags(tfm)); 1183 goto out; 1184 } else if (ret) 1185 continue; 1186 1187 sg_init_one(&sg[0], data, template[i].len); 1188 if (diff_dst) { 1189 data = xoutbuf[0]; 1190 data += align_offset; 1191 sg_init_one(&sgout[0], data, template[i].len); 1192 } 1193 1194 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1195 template[i].len, iv); 1196 ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) : 1197 crypto_skcipher_decrypt(req), &wait); 1198 1199 if (ret) { 1200 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n", 1201 d, e, j, algo, -ret); 1202 goto out; 1203 } 1204 1205 q = data; 1206 if (memcmp(q, result, template[i].len)) { 1207 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n", 1208 d, j, e, algo); 1209 hexdump(q, template[i].len); 1210 ret = -EINVAL; 1211 goto out; 1212 } 1213 1214 if (template[i].generates_iv && enc && 1215 memcmp(iv, template[i].iv, crypto_skcipher_ivsize(tfm))) { 1216 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n", 1217 d, j, e, algo); 1218 hexdump(iv, crypto_skcipher_ivsize(tfm)); 1219 ret = -EINVAL; 1220 goto out; 1221 } 1222 } 1223 1224 j = 0; 1225 for (i = 0; i < tcount; i++) { 1226 /* alignment tests are only done with continuous buffers */ 1227 if (align_offset != 0) 1228 break; 1229 1230 if (!template[i].np) 1231 continue; 1232 1233 if (fips_enabled && template[i].fips_skip) 1234 continue; 1235 1236 if (template[i].iv && !(template[i].generates_iv && enc)) 1237 memcpy(iv, template[i].iv, ivsize); 1238 else 1239 memset(iv, 0, MAX_IVLEN); 1240 1241 input = enc ? template[i].ptext : template[i].ctext; 1242 result = enc ? template[i].ctext : template[i].ptext; 1243 j++; 1244 crypto_skcipher_clear_flags(tfm, ~0); 1245 if (template[i].wk) 1246 crypto_skcipher_set_flags(tfm, 1247 CRYPTO_TFM_REQ_WEAK_KEY); 1248 1249 ret = crypto_skcipher_setkey(tfm, template[i].key, 1250 template[i].klen); 1251 if (template[i].fail == !ret) { 1252 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n", 1253 d, j, algo, crypto_skcipher_get_flags(tfm)); 1254 goto out; 1255 } else if (ret) 1256 continue; 1257 1258 temp = 0; 1259 ret = -EINVAL; 1260 sg_init_table(sg, template[i].np); 1261 if (diff_dst) 1262 sg_init_table(sgout, template[i].np); 1263 for (k = 0; k < template[i].np; k++) { 1264 if (WARN_ON(offset_in_page(IDX[k]) + 1265 template[i].tap[k] > PAGE_SIZE)) 1266 goto out; 1267 1268 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]); 1269 1270 memcpy(q, input + temp, template[i].tap[k]); 1271 1272 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE) 1273 q[template[i].tap[k]] = 0; 1274 1275 sg_set_buf(&sg[k], q, template[i].tap[k]); 1276 if (diff_dst) { 1277 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1278 offset_in_page(IDX[k]); 1279 1280 sg_set_buf(&sgout[k], q, template[i].tap[k]); 1281 1282 memset(q, 0, template[i].tap[k]); 1283 if (offset_in_page(q) + 1284 template[i].tap[k] < PAGE_SIZE) 1285 q[template[i].tap[k]] = 0; 1286 } 1287 1288 temp += template[i].tap[k]; 1289 } 1290 1291 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, 1292 template[i].len, iv); 1293 1294 ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) : 1295 crypto_skcipher_decrypt(req), &wait); 1296 1297 if (ret) { 1298 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n", 1299 d, e, j, algo, -ret); 1300 goto out; 1301 } 1302 1303 temp = 0; 1304 ret = -EINVAL; 1305 for (k = 0; k < template[i].np; k++) { 1306 if (diff_dst) 1307 q = xoutbuf[IDX[k] >> PAGE_SHIFT] + 1308 offset_in_page(IDX[k]); 1309 else 1310 q = xbuf[IDX[k] >> PAGE_SHIFT] + 1311 offset_in_page(IDX[k]); 1312 1313 if (memcmp(q, result + temp, template[i].tap[k])) { 1314 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n", 1315 d, j, e, k, algo); 1316 hexdump(q, template[i].tap[k]); 1317 goto out; 1318 } 1319 1320 q += template[i].tap[k]; 1321 for (n = 0; offset_in_page(q + n) && q[n]; n++) 1322 ; 1323 if (n) { 1324 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n", 1325 d, j, e, k, algo, n); 1326 hexdump(q, n); 1327 goto out; 1328 } 1329 temp += template[i].tap[k]; 1330 } 1331 } 1332 1333 ret = 0; 1334 1335 out: 1336 skcipher_request_free(req); 1337 if (diff_dst) 1338 testmgr_free_buf(xoutbuf); 1339 out_nooutbuf: 1340 testmgr_free_buf(xbuf); 1341 out_nobuf: 1342 return ret; 1343 } 1344 1345 static int test_skcipher(struct crypto_skcipher *tfm, int enc, 1346 const struct cipher_testvec *template, 1347 unsigned int tcount) 1348 { 1349 unsigned int alignmask; 1350 int ret; 1351 1352 /* test 'dst == src' case */ 1353 ret = __test_skcipher(tfm, enc, template, tcount, false, 0); 1354 if (ret) 1355 return ret; 1356 1357 /* test 'dst != src' case */ 1358 ret = __test_skcipher(tfm, enc, template, tcount, true, 0); 1359 if (ret) 1360 return ret; 1361 1362 /* test unaligned buffers, check with one byte offset */ 1363 ret = __test_skcipher(tfm, enc, template, tcount, true, 1); 1364 if (ret) 1365 return ret; 1366 1367 alignmask = crypto_tfm_alg_alignmask(&tfm->base); 1368 if (alignmask) { 1369 /* Check if alignment mask for tfm is correctly set. */ 1370 ret = __test_skcipher(tfm, enc, template, tcount, true, 1371 alignmask + 1); 1372 if (ret) 1373 return ret; 1374 } 1375 1376 return 0; 1377 } 1378 1379 static int test_comp(struct crypto_comp *tfm, 1380 const struct comp_testvec *ctemplate, 1381 const struct comp_testvec *dtemplate, 1382 int ctcount, int dtcount) 1383 { 1384 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 1385 char *output, *decomp_output; 1386 unsigned int i; 1387 int ret; 1388 1389 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 1390 if (!output) 1391 return -ENOMEM; 1392 1393 decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 1394 if (!decomp_output) { 1395 kfree(output); 1396 return -ENOMEM; 1397 } 1398 1399 for (i = 0; i < ctcount; i++) { 1400 int ilen; 1401 unsigned int dlen = COMP_BUF_SIZE; 1402 1403 memset(output, 0, sizeof(COMP_BUF_SIZE)); 1404 memset(decomp_output, 0, sizeof(COMP_BUF_SIZE)); 1405 1406 ilen = ctemplate[i].inlen; 1407 ret = crypto_comp_compress(tfm, ctemplate[i].input, 1408 ilen, output, &dlen); 1409 if (ret) { 1410 printk(KERN_ERR "alg: comp: compression failed " 1411 "on test %d for %s: ret=%d\n", i + 1, algo, 1412 -ret); 1413 goto out; 1414 } 1415 1416 ilen = dlen; 1417 dlen = COMP_BUF_SIZE; 1418 ret = crypto_comp_decompress(tfm, output, 1419 ilen, decomp_output, &dlen); 1420 if (ret) { 1421 pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n", 1422 i + 1, algo, -ret); 1423 goto out; 1424 } 1425 1426 if (dlen != ctemplate[i].inlen) { 1427 printk(KERN_ERR "alg: comp: Compression test %d " 1428 "failed for %s: output len = %d\n", i + 1, algo, 1429 dlen); 1430 ret = -EINVAL; 1431 goto out; 1432 } 1433 1434 if (memcmp(decomp_output, ctemplate[i].input, 1435 ctemplate[i].inlen)) { 1436 pr_err("alg: comp: compression failed: output differs: on test %d for %s\n", 1437 i + 1, algo); 1438 hexdump(decomp_output, dlen); 1439 ret = -EINVAL; 1440 goto out; 1441 } 1442 } 1443 1444 for (i = 0; i < dtcount; i++) { 1445 int ilen; 1446 unsigned int dlen = COMP_BUF_SIZE; 1447 1448 memset(decomp_output, 0, sizeof(COMP_BUF_SIZE)); 1449 1450 ilen = dtemplate[i].inlen; 1451 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 1452 ilen, decomp_output, &dlen); 1453 if (ret) { 1454 printk(KERN_ERR "alg: comp: decompression failed " 1455 "on test %d for %s: ret=%d\n", i + 1, algo, 1456 -ret); 1457 goto out; 1458 } 1459 1460 if (dlen != dtemplate[i].outlen) { 1461 printk(KERN_ERR "alg: comp: Decompression test %d " 1462 "failed for %s: output len = %d\n", i + 1, algo, 1463 dlen); 1464 ret = -EINVAL; 1465 goto out; 1466 } 1467 1468 if (memcmp(decomp_output, dtemplate[i].output, dlen)) { 1469 printk(KERN_ERR "alg: comp: Decompression test %d " 1470 "failed for %s\n", i + 1, algo); 1471 hexdump(decomp_output, dlen); 1472 ret = -EINVAL; 1473 goto out; 1474 } 1475 } 1476 1477 ret = 0; 1478 1479 out: 1480 kfree(decomp_output); 1481 kfree(output); 1482 return ret; 1483 } 1484 1485 static int test_acomp(struct crypto_acomp *tfm, 1486 const struct comp_testvec *ctemplate, 1487 const struct comp_testvec *dtemplate, 1488 int ctcount, int dtcount) 1489 { 1490 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm)); 1491 unsigned int i; 1492 char *output, *decomp_out; 1493 int ret; 1494 struct scatterlist src, dst; 1495 struct acomp_req *req; 1496 struct crypto_wait wait; 1497 1498 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 1499 if (!output) 1500 return -ENOMEM; 1501 1502 decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 1503 if (!decomp_out) { 1504 kfree(output); 1505 return -ENOMEM; 1506 } 1507 1508 for (i = 0; i < ctcount; i++) { 1509 unsigned int dlen = COMP_BUF_SIZE; 1510 int ilen = ctemplate[i].inlen; 1511 void *input_vec; 1512 1513 input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL); 1514 if (!input_vec) { 1515 ret = -ENOMEM; 1516 goto out; 1517 } 1518 1519 memset(output, 0, dlen); 1520 crypto_init_wait(&wait); 1521 sg_init_one(&src, input_vec, ilen); 1522 sg_init_one(&dst, output, dlen); 1523 1524 req = acomp_request_alloc(tfm); 1525 if (!req) { 1526 pr_err("alg: acomp: request alloc failed for %s\n", 1527 algo); 1528 kfree(input_vec); 1529 ret = -ENOMEM; 1530 goto out; 1531 } 1532 1533 acomp_request_set_params(req, &src, &dst, ilen, dlen); 1534 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1535 crypto_req_done, &wait); 1536 1537 ret = crypto_wait_req(crypto_acomp_compress(req), &wait); 1538 if (ret) { 1539 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n", 1540 i + 1, algo, -ret); 1541 kfree(input_vec); 1542 acomp_request_free(req); 1543 goto out; 1544 } 1545 1546 ilen = req->dlen; 1547 dlen = COMP_BUF_SIZE; 1548 sg_init_one(&src, output, ilen); 1549 sg_init_one(&dst, decomp_out, dlen); 1550 crypto_init_wait(&wait); 1551 acomp_request_set_params(req, &src, &dst, ilen, dlen); 1552 1553 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait); 1554 if (ret) { 1555 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n", 1556 i + 1, algo, -ret); 1557 kfree(input_vec); 1558 acomp_request_free(req); 1559 goto out; 1560 } 1561 1562 if (req->dlen != ctemplate[i].inlen) { 1563 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n", 1564 i + 1, algo, req->dlen); 1565 ret = -EINVAL; 1566 kfree(input_vec); 1567 acomp_request_free(req); 1568 goto out; 1569 } 1570 1571 if (memcmp(input_vec, decomp_out, req->dlen)) { 1572 pr_err("alg: acomp: Compression test %d failed for %s\n", 1573 i + 1, algo); 1574 hexdump(output, req->dlen); 1575 ret = -EINVAL; 1576 kfree(input_vec); 1577 acomp_request_free(req); 1578 goto out; 1579 } 1580 1581 kfree(input_vec); 1582 acomp_request_free(req); 1583 } 1584 1585 for (i = 0; i < dtcount; i++) { 1586 unsigned int dlen = COMP_BUF_SIZE; 1587 int ilen = dtemplate[i].inlen; 1588 void *input_vec; 1589 1590 input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL); 1591 if (!input_vec) { 1592 ret = -ENOMEM; 1593 goto out; 1594 } 1595 1596 memset(output, 0, dlen); 1597 crypto_init_wait(&wait); 1598 sg_init_one(&src, input_vec, ilen); 1599 sg_init_one(&dst, output, dlen); 1600 1601 req = acomp_request_alloc(tfm); 1602 if (!req) { 1603 pr_err("alg: acomp: request alloc failed for %s\n", 1604 algo); 1605 kfree(input_vec); 1606 ret = -ENOMEM; 1607 goto out; 1608 } 1609 1610 acomp_request_set_params(req, &src, &dst, ilen, dlen); 1611 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 1612 crypto_req_done, &wait); 1613 1614 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait); 1615 if (ret) { 1616 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n", 1617 i + 1, algo, -ret); 1618 kfree(input_vec); 1619 acomp_request_free(req); 1620 goto out; 1621 } 1622 1623 if (req->dlen != dtemplate[i].outlen) { 1624 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n", 1625 i + 1, algo, req->dlen); 1626 ret = -EINVAL; 1627 kfree(input_vec); 1628 acomp_request_free(req); 1629 goto out; 1630 } 1631 1632 if (memcmp(output, dtemplate[i].output, req->dlen)) { 1633 pr_err("alg: acomp: Decompression test %d failed for %s\n", 1634 i + 1, algo); 1635 hexdump(output, req->dlen); 1636 ret = -EINVAL; 1637 kfree(input_vec); 1638 acomp_request_free(req); 1639 goto out; 1640 } 1641 1642 kfree(input_vec); 1643 acomp_request_free(req); 1644 } 1645 1646 ret = 0; 1647 1648 out: 1649 kfree(decomp_out); 1650 kfree(output); 1651 return ret; 1652 } 1653 1654 static int test_cprng(struct crypto_rng *tfm, 1655 const struct cprng_testvec *template, 1656 unsigned int tcount) 1657 { 1658 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1659 int err = 0, i, j, seedsize; 1660 u8 *seed; 1661 char result[32]; 1662 1663 seedsize = crypto_rng_seedsize(tfm); 1664 1665 seed = kmalloc(seedsize, GFP_KERNEL); 1666 if (!seed) { 1667 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1668 "for %s\n", algo); 1669 return -ENOMEM; 1670 } 1671 1672 for (i = 0; i < tcount; i++) { 1673 memset(result, 0, 32); 1674 1675 memcpy(seed, template[i].v, template[i].vlen); 1676 memcpy(seed + template[i].vlen, template[i].key, 1677 template[i].klen); 1678 memcpy(seed + template[i].vlen + template[i].klen, 1679 template[i].dt, template[i].dtlen); 1680 1681 err = crypto_rng_reset(tfm, seed, seedsize); 1682 if (err) { 1683 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1684 "for %s\n", algo); 1685 goto out; 1686 } 1687 1688 for (j = 0; j < template[i].loops; j++) { 1689 err = crypto_rng_get_bytes(tfm, result, 1690 template[i].rlen); 1691 if (err < 0) { 1692 printk(KERN_ERR "alg: cprng: Failed to obtain " 1693 "the correct amount of random data for " 1694 "%s (requested %d)\n", algo, 1695 template[i].rlen); 1696 goto out; 1697 } 1698 } 1699 1700 err = memcmp(result, template[i].result, 1701 template[i].rlen); 1702 if (err) { 1703 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1704 i, algo); 1705 hexdump(result, template[i].rlen); 1706 err = -EINVAL; 1707 goto out; 1708 } 1709 } 1710 1711 out: 1712 kfree(seed); 1713 return err; 1714 } 1715 1716 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1717 u32 type, u32 mask) 1718 { 1719 struct crypto_aead *tfm; 1720 int err = 0; 1721 1722 tfm = crypto_alloc_aead(driver, type, mask); 1723 if (IS_ERR(tfm)) { 1724 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1725 "%ld\n", driver, PTR_ERR(tfm)); 1726 return PTR_ERR(tfm); 1727 } 1728 1729 if (desc->suite.aead.enc.vecs) { 1730 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1731 desc->suite.aead.enc.count); 1732 if (err) 1733 goto out; 1734 } 1735 1736 if (!err && desc->suite.aead.dec.vecs) 1737 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1738 desc->suite.aead.dec.count); 1739 1740 out: 1741 crypto_free_aead(tfm); 1742 return err; 1743 } 1744 1745 static int alg_test_cipher(const struct alg_test_desc *desc, 1746 const char *driver, u32 type, u32 mask) 1747 { 1748 const struct cipher_test_suite *suite = &desc->suite.cipher; 1749 struct crypto_cipher *tfm; 1750 int err; 1751 1752 tfm = crypto_alloc_cipher(driver, type, mask); 1753 if (IS_ERR(tfm)) { 1754 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1755 "%s: %ld\n", driver, PTR_ERR(tfm)); 1756 return PTR_ERR(tfm); 1757 } 1758 1759 err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count); 1760 if (!err) 1761 err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count); 1762 1763 crypto_free_cipher(tfm); 1764 return err; 1765 } 1766 1767 static int alg_test_skcipher(const struct alg_test_desc *desc, 1768 const char *driver, u32 type, u32 mask) 1769 { 1770 const struct cipher_test_suite *suite = &desc->suite.cipher; 1771 struct crypto_skcipher *tfm; 1772 int err; 1773 1774 tfm = crypto_alloc_skcipher(driver, type, mask); 1775 if (IS_ERR(tfm)) { 1776 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1777 "%s: %ld\n", driver, PTR_ERR(tfm)); 1778 return PTR_ERR(tfm); 1779 } 1780 1781 err = test_skcipher(tfm, ENCRYPT, suite->vecs, suite->count); 1782 if (!err) 1783 err = test_skcipher(tfm, DECRYPT, suite->vecs, suite->count); 1784 1785 crypto_free_skcipher(tfm); 1786 return err; 1787 } 1788 1789 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1790 u32 type, u32 mask) 1791 { 1792 struct crypto_comp *comp; 1793 struct crypto_acomp *acomp; 1794 int err; 1795 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK; 1796 1797 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) { 1798 acomp = crypto_alloc_acomp(driver, type, mask); 1799 if (IS_ERR(acomp)) { 1800 pr_err("alg: acomp: Failed to load transform for %s: %ld\n", 1801 driver, PTR_ERR(acomp)); 1802 return PTR_ERR(acomp); 1803 } 1804 err = test_acomp(acomp, desc->suite.comp.comp.vecs, 1805 desc->suite.comp.decomp.vecs, 1806 desc->suite.comp.comp.count, 1807 desc->suite.comp.decomp.count); 1808 crypto_free_acomp(acomp); 1809 } else { 1810 comp = crypto_alloc_comp(driver, type, mask); 1811 if (IS_ERR(comp)) { 1812 pr_err("alg: comp: Failed to load transform for %s: %ld\n", 1813 driver, PTR_ERR(comp)); 1814 return PTR_ERR(comp); 1815 } 1816 1817 err = test_comp(comp, desc->suite.comp.comp.vecs, 1818 desc->suite.comp.decomp.vecs, 1819 desc->suite.comp.comp.count, 1820 desc->suite.comp.decomp.count); 1821 1822 crypto_free_comp(comp); 1823 } 1824 return err; 1825 } 1826 1827 static int __alg_test_hash(const struct hash_testvec *template, 1828 unsigned int tcount, const char *driver, 1829 u32 type, u32 mask) 1830 { 1831 struct crypto_ahash *tfm; 1832 int err; 1833 1834 tfm = crypto_alloc_ahash(driver, type, mask); 1835 if (IS_ERR(tfm)) { 1836 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1837 "%ld\n", driver, PTR_ERR(tfm)); 1838 return PTR_ERR(tfm); 1839 } 1840 1841 err = test_hash(tfm, template, tcount, HASH_TEST_DIGEST); 1842 if (!err) 1843 err = test_hash(tfm, template, tcount, HASH_TEST_FINAL); 1844 if (!err) 1845 err = test_hash(tfm, template, tcount, HASH_TEST_FINUP); 1846 crypto_free_ahash(tfm); 1847 return err; 1848 } 1849 1850 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1851 u32 type, u32 mask) 1852 { 1853 const struct hash_testvec *template = desc->suite.hash.vecs; 1854 unsigned int tcount = desc->suite.hash.count; 1855 unsigned int nr_unkeyed, nr_keyed; 1856 int err; 1857 1858 /* 1859 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests 1860 * first, before setting a key on the tfm. To make this easier, we 1861 * require that the unkeyed test vectors (if any) are listed first. 1862 */ 1863 1864 for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) { 1865 if (template[nr_unkeyed].ksize) 1866 break; 1867 } 1868 for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) { 1869 if (!template[nr_unkeyed + nr_keyed].ksize) { 1870 pr_err("alg: hash: test vectors for %s out of order, " 1871 "unkeyed ones must come first\n", desc->alg); 1872 return -EINVAL; 1873 } 1874 } 1875 1876 err = 0; 1877 if (nr_unkeyed) { 1878 err = __alg_test_hash(template, nr_unkeyed, driver, type, mask); 1879 template += nr_unkeyed; 1880 } 1881 1882 if (!err && nr_keyed) 1883 err = __alg_test_hash(template, nr_keyed, driver, type, mask); 1884 1885 return err; 1886 } 1887 1888 static int alg_test_crc32c(const struct alg_test_desc *desc, 1889 const char *driver, u32 type, u32 mask) 1890 { 1891 struct crypto_shash *tfm; 1892 u32 val; 1893 int err; 1894 1895 err = alg_test_hash(desc, driver, type, mask); 1896 if (err) 1897 goto out; 1898 1899 tfm = crypto_alloc_shash(driver, type, mask); 1900 if (IS_ERR(tfm)) { 1901 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1902 "%ld\n", driver, PTR_ERR(tfm)); 1903 err = PTR_ERR(tfm); 1904 goto out; 1905 } 1906 1907 do { 1908 SHASH_DESC_ON_STACK(shash, tfm); 1909 u32 *ctx = (u32 *)shash_desc_ctx(shash); 1910 1911 shash->tfm = tfm; 1912 shash->flags = 0; 1913 1914 *ctx = le32_to_cpu(420553207); 1915 err = crypto_shash_final(shash, (u8 *)&val); 1916 if (err) { 1917 printk(KERN_ERR "alg: crc32c: Operation failed for " 1918 "%s: %d\n", driver, err); 1919 break; 1920 } 1921 1922 if (val != ~420553207) { 1923 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1924 "%d\n", driver, val); 1925 err = -EINVAL; 1926 } 1927 } while (0); 1928 1929 crypto_free_shash(tfm); 1930 1931 out: 1932 return err; 1933 } 1934 1935 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1936 u32 type, u32 mask) 1937 { 1938 struct crypto_rng *rng; 1939 int err; 1940 1941 rng = crypto_alloc_rng(driver, type, mask); 1942 if (IS_ERR(rng)) { 1943 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1944 "%ld\n", driver, PTR_ERR(rng)); 1945 return PTR_ERR(rng); 1946 } 1947 1948 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1949 1950 crypto_free_rng(rng); 1951 1952 return err; 1953 } 1954 1955 1956 static int drbg_cavs_test(const struct drbg_testvec *test, int pr, 1957 const char *driver, u32 type, u32 mask) 1958 { 1959 int ret = -EAGAIN; 1960 struct crypto_rng *drng; 1961 struct drbg_test_data test_data; 1962 struct drbg_string addtl, pers, testentropy; 1963 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL); 1964 1965 if (!buf) 1966 return -ENOMEM; 1967 1968 drng = crypto_alloc_rng(driver, type, mask); 1969 if (IS_ERR(drng)) { 1970 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for " 1971 "%s\n", driver); 1972 kzfree(buf); 1973 return -ENOMEM; 1974 } 1975 1976 test_data.testentropy = &testentropy; 1977 drbg_string_fill(&testentropy, test->entropy, test->entropylen); 1978 drbg_string_fill(&pers, test->pers, test->perslen); 1979 ret = crypto_drbg_reset_test(drng, &pers, &test_data); 1980 if (ret) { 1981 printk(KERN_ERR "alg: drbg: Failed to reset rng\n"); 1982 goto outbuf; 1983 } 1984 1985 drbg_string_fill(&addtl, test->addtla, test->addtllen); 1986 if (pr) { 1987 drbg_string_fill(&testentropy, test->entpra, test->entprlen); 1988 ret = crypto_drbg_get_bytes_addtl_test(drng, 1989 buf, test->expectedlen, &addtl, &test_data); 1990 } else { 1991 ret = crypto_drbg_get_bytes_addtl(drng, 1992 buf, test->expectedlen, &addtl); 1993 } 1994 if (ret < 0) { 1995 printk(KERN_ERR "alg: drbg: could not obtain random data for " 1996 "driver %s\n", driver); 1997 goto outbuf; 1998 } 1999 2000 drbg_string_fill(&addtl, test->addtlb, test->addtllen); 2001 if (pr) { 2002 drbg_string_fill(&testentropy, test->entprb, test->entprlen); 2003 ret = crypto_drbg_get_bytes_addtl_test(drng, 2004 buf, test->expectedlen, &addtl, &test_data); 2005 } else { 2006 ret = crypto_drbg_get_bytes_addtl(drng, 2007 buf, test->expectedlen, &addtl); 2008 } 2009 if (ret < 0) { 2010 printk(KERN_ERR "alg: drbg: could not obtain random data for " 2011 "driver %s\n", driver); 2012 goto outbuf; 2013 } 2014 2015 ret = memcmp(test->expected, buf, test->expectedlen); 2016 2017 outbuf: 2018 crypto_free_rng(drng); 2019 kzfree(buf); 2020 return ret; 2021 } 2022 2023 2024 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver, 2025 u32 type, u32 mask) 2026 { 2027 int err = 0; 2028 int pr = 0; 2029 int i = 0; 2030 const struct drbg_testvec *template = desc->suite.drbg.vecs; 2031 unsigned int tcount = desc->suite.drbg.count; 2032 2033 if (0 == memcmp(driver, "drbg_pr_", 8)) 2034 pr = 1; 2035 2036 for (i = 0; i < tcount; i++) { 2037 err = drbg_cavs_test(&template[i], pr, driver, type, mask); 2038 if (err) { 2039 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n", 2040 i, driver); 2041 err = -EINVAL; 2042 break; 2043 } 2044 } 2045 return err; 2046 2047 } 2048 2049 static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec, 2050 const char *alg) 2051 { 2052 struct kpp_request *req; 2053 void *input_buf = NULL; 2054 void *output_buf = NULL; 2055 void *a_public = NULL; 2056 void *a_ss = NULL; 2057 void *shared_secret = NULL; 2058 struct crypto_wait wait; 2059 unsigned int out_len_max; 2060 int err = -ENOMEM; 2061 struct scatterlist src, dst; 2062 2063 req = kpp_request_alloc(tfm, GFP_KERNEL); 2064 if (!req) 2065 return err; 2066 2067 crypto_init_wait(&wait); 2068 2069 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size); 2070 if (err < 0) 2071 goto free_req; 2072 2073 out_len_max = crypto_kpp_maxsize(tfm); 2074 output_buf = kzalloc(out_len_max, GFP_KERNEL); 2075 if (!output_buf) { 2076 err = -ENOMEM; 2077 goto free_req; 2078 } 2079 2080 /* Use appropriate parameter as base */ 2081 kpp_request_set_input(req, NULL, 0); 2082 sg_init_one(&dst, output_buf, out_len_max); 2083 kpp_request_set_output(req, &dst, out_len_max); 2084 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2085 crypto_req_done, &wait); 2086 2087 /* Compute party A's public key */ 2088 err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait); 2089 if (err) { 2090 pr_err("alg: %s: Party A: generate public key test failed. err %d\n", 2091 alg, err); 2092 goto free_output; 2093 } 2094 2095 if (vec->genkey) { 2096 /* Save party A's public key */ 2097 a_public = kzalloc(out_len_max, GFP_KERNEL); 2098 if (!a_public) { 2099 err = -ENOMEM; 2100 goto free_output; 2101 } 2102 memcpy(a_public, sg_virt(req->dst), out_len_max); 2103 } else { 2104 /* Verify calculated public key */ 2105 if (memcmp(vec->expected_a_public, sg_virt(req->dst), 2106 vec->expected_a_public_size)) { 2107 pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n", 2108 alg); 2109 err = -EINVAL; 2110 goto free_output; 2111 } 2112 } 2113 2114 /* Calculate shared secret key by using counter part (b) public key. */ 2115 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL); 2116 if (!input_buf) { 2117 err = -ENOMEM; 2118 goto free_output; 2119 } 2120 2121 memcpy(input_buf, vec->b_public, vec->b_public_size); 2122 sg_init_one(&src, input_buf, vec->b_public_size); 2123 sg_init_one(&dst, output_buf, out_len_max); 2124 kpp_request_set_input(req, &src, vec->b_public_size); 2125 kpp_request_set_output(req, &dst, out_len_max); 2126 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2127 crypto_req_done, &wait); 2128 err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait); 2129 if (err) { 2130 pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n", 2131 alg, err); 2132 goto free_all; 2133 } 2134 2135 if (vec->genkey) { 2136 /* Save the shared secret obtained by party A */ 2137 a_ss = kzalloc(vec->expected_ss_size, GFP_KERNEL); 2138 if (!a_ss) { 2139 err = -ENOMEM; 2140 goto free_all; 2141 } 2142 memcpy(a_ss, sg_virt(req->dst), vec->expected_ss_size); 2143 2144 /* 2145 * Calculate party B's shared secret by using party A's 2146 * public key. 2147 */ 2148 err = crypto_kpp_set_secret(tfm, vec->b_secret, 2149 vec->b_secret_size); 2150 if (err < 0) 2151 goto free_all; 2152 2153 sg_init_one(&src, a_public, vec->expected_a_public_size); 2154 sg_init_one(&dst, output_buf, out_len_max); 2155 kpp_request_set_input(req, &src, vec->expected_a_public_size); 2156 kpp_request_set_output(req, &dst, out_len_max); 2157 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2158 crypto_req_done, &wait); 2159 err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), 2160 &wait); 2161 if (err) { 2162 pr_err("alg: %s: Party B: compute shared secret failed. err %d\n", 2163 alg, err); 2164 goto free_all; 2165 } 2166 2167 shared_secret = a_ss; 2168 } else { 2169 shared_secret = (void *)vec->expected_ss; 2170 } 2171 2172 /* 2173 * verify shared secret from which the user will derive 2174 * secret key by executing whatever hash it has chosen 2175 */ 2176 if (memcmp(shared_secret, sg_virt(req->dst), 2177 vec->expected_ss_size)) { 2178 pr_err("alg: %s: compute shared secret test failed. Invalid output\n", 2179 alg); 2180 err = -EINVAL; 2181 } 2182 2183 free_all: 2184 kfree(a_ss); 2185 kfree(input_buf); 2186 free_output: 2187 kfree(a_public); 2188 kfree(output_buf); 2189 free_req: 2190 kpp_request_free(req); 2191 return err; 2192 } 2193 2194 static int test_kpp(struct crypto_kpp *tfm, const char *alg, 2195 const struct kpp_testvec *vecs, unsigned int tcount) 2196 { 2197 int ret, i; 2198 2199 for (i = 0; i < tcount; i++) { 2200 ret = do_test_kpp(tfm, vecs++, alg); 2201 if (ret) { 2202 pr_err("alg: %s: test failed on vector %d, err=%d\n", 2203 alg, i + 1, ret); 2204 return ret; 2205 } 2206 } 2207 return 0; 2208 } 2209 2210 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver, 2211 u32 type, u32 mask) 2212 { 2213 struct crypto_kpp *tfm; 2214 int err = 0; 2215 2216 tfm = crypto_alloc_kpp(driver, type, mask); 2217 if (IS_ERR(tfm)) { 2218 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n", 2219 driver, PTR_ERR(tfm)); 2220 return PTR_ERR(tfm); 2221 } 2222 if (desc->suite.kpp.vecs) 2223 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs, 2224 desc->suite.kpp.count); 2225 2226 crypto_free_kpp(tfm); 2227 return err; 2228 } 2229 2230 static int test_akcipher_one(struct crypto_akcipher *tfm, 2231 const struct akcipher_testvec *vecs) 2232 { 2233 char *xbuf[XBUFSIZE]; 2234 struct akcipher_request *req; 2235 void *outbuf_enc = NULL; 2236 void *outbuf_dec = NULL; 2237 struct crypto_wait wait; 2238 unsigned int out_len_max, out_len = 0; 2239 int err = -ENOMEM; 2240 struct scatterlist src, dst, src_tab[2]; 2241 2242 if (testmgr_alloc_buf(xbuf)) 2243 return err; 2244 2245 req = akcipher_request_alloc(tfm, GFP_KERNEL); 2246 if (!req) 2247 goto free_xbuf; 2248 2249 crypto_init_wait(&wait); 2250 2251 if (vecs->public_key_vec) 2252 err = crypto_akcipher_set_pub_key(tfm, vecs->key, 2253 vecs->key_len); 2254 else 2255 err = crypto_akcipher_set_priv_key(tfm, vecs->key, 2256 vecs->key_len); 2257 if (err) 2258 goto free_req; 2259 2260 err = -ENOMEM; 2261 out_len_max = crypto_akcipher_maxsize(tfm); 2262 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL); 2263 if (!outbuf_enc) 2264 goto free_req; 2265 2266 if (WARN_ON(vecs->m_size > PAGE_SIZE)) 2267 goto free_all; 2268 2269 memcpy(xbuf[0], vecs->m, vecs->m_size); 2270 2271 sg_init_table(src_tab, 2); 2272 sg_set_buf(&src_tab[0], xbuf[0], 8); 2273 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8); 2274 sg_init_one(&dst, outbuf_enc, out_len_max); 2275 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size, 2276 out_len_max); 2277 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2278 crypto_req_done, &wait); 2279 2280 err = crypto_wait_req(vecs->siggen_sigver_test ? 2281 /* Run asymmetric signature generation */ 2282 crypto_akcipher_sign(req) : 2283 /* Run asymmetric encrypt */ 2284 crypto_akcipher_encrypt(req), &wait); 2285 if (err) { 2286 pr_err("alg: akcipher: encrypt test failed. err %d\n", err); 2287 goto free_all; 2288 } 2289 if (req->dst_len != vecs->c_size) { 2290 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n"); 2291 err = -EINVAL; 2292 goto free_all; 2293 } 2294 /* verify that encrypted message is equal to expected */ 2295 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) { 2296 pr_err("alg: akcipher: encrypt test failed. Invalid output\n"); 2297 hexdump(outbuf_enc, vecs->c_size); 2298 err = -EINVAL; 2299 goto free_all; 2300 } 2301 /* Don't invoke decrypt for vectors with public key */ 2302 if (vecs->public_key_vec) { 2303 err = 0; 2304 goto free_all; 2305 } 2306 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL); 2307 if (!outbuf_dec) { 2308 err = -ENOMEM; 2309 goto free_all; 2310 } 2311 2312 if (WARN_ON(vecs->c_size > PAGE_SIZE)) 2313 goto free_all; 2314 2315 memcpy(xbuf[0], vecs->c, vecs->c_size); 2316 2317 sg_init_one(&src, xbuf[0], vecs->c_size); 2318 sg_init_one(&dst, outbuf_dec, out_len_max); 2319 crypto_init_wait(&wait); 2320 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max); 2321 2322 err = crypto_wait_req(vecs->siggen_sigver_test ? 2323 /* Run asymmetric signature verification */ 2324 crypto_akcipher_verify(req) : 2325 /* Run asymmetric decrypt */ 2326 crypto_akcipher_decrypt(req), &wait); 2327 if (err) { 2328 pr_err("alg: akcipher: decrypt test failed. err %d\n", err); 2329 goto free_all; 2330 } 2331 out_len = req->dst_len; 2332 if (out_len < vecs->m_size) { 2333 pr_err("alg: akcipher: decrypt test failed. " 2334 "Invalid output len %u\n", out_len); 2335 err = -EINVAL; 2336 goto free_all; 2337 } 2338 /* verify that decrypted message is equal to the original msg */ 2339 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) || 2340 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size, 2341 vecs->m_size)) { 2342 pr_err("alg: akcipher: decrypt test failed. Invalid output\n"); 2343 hexdump(outbuf_dec, out_len); 2344 err = -EINVAL; 2345 } 2346 free_all: 2347 kfree(outbuf_dec); 2348 kfree(outbuf_enc); 2349 free_req: 2350 akcipher_request_free(req); 2351 free_xbuf: 2352 testmgr_free_buf(xbuf); 2353 return err; 2354 } 2355 2356 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg, 2357 const struct akcipher_testvec *vecs, 2358 unsigned int tcount) 2359 { 2360 const char *algo = 2361 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm)); 2362 int ret, i; 2363 2364 for (i = 0; i < tcount; i++) { 2365 ret = test_akcipher_one(tfm, vecs++); 2366 if (!ret) 2367 continue; 2368 2369 pr_err("alg: akcipher: test %d failed for %s, err=%d\n", 2370 i + 1, algo, ret); 2371 return ret; 2372 } 2373 return 0; 2374 } 2375 2376 static int alg_test_akcipher(const struct alg_test_desc *desc, 2377 const char *driver, u32 type, u32 mask) 2378 { 2379 struct crypto_akcipher *tfm; 2380 int err = 0; 2381 2382 tfm = crypto_alloc_akcipher(driver, type, mask); 2383 if (IS_ERR(tfm)) { 2384 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n", 2385 driver, PTR_ERR(tfm)); 2386 return PTR_ERR(tfm); 2387 } 2388 if (desc->suite.akcipher.vecs) 2389 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs, 2390 desc->suite.akcipher.count); 2391 2392 crypto_free_akcipher(tfm); 2393 return err; 2394 } 2395 2396 static int alg_test_null(const struct alg_test_desc *desc, 2397 const char *driver, u32 type, u32 mask) 2398 { 2399 return 0; 2400 } 2401 2402 #define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) } 2403 2404 /* Please keep this list sorted by algorithm name. */ 2405 static const struct alg_test_desc alg_test_descs[] = { 2406 { 2407 .alg = "aegis128", 2408 .test = alg_test_aead, 2409 .suite = { 2410 .aead = { 2411 .enc = __VECS(aegis128_enc_tv_template), 2412 .dec = __VECS(aegis128_dec_tv_template), 2413 } 2414 } 2415 }, { 2416 .alg = "aegis128l", 2417 .test = alg_test_aead, 2418 .suite = { 2419 .aead = { 2420 .enc = __VECS(aegis128l_enc_tv_template), 2421 .dec = __VECS(aegis128l_dec_tv_template), 2422 } 2423 } 2424 }, { 2425 .alg = "aegis256", 2426 .test = alg_test_aead, 2427 .suite = { 2428 .aead = { 2429 .enc = __VECS(aegis256_enc_tv_template), 2430 .dec = __VECS(aegis256_dec_tv_template), 2431 } 2432 } 2433 }, { 2434 .alg = "ansi_cprng", 2435 .test = alg_test_cprng, 2436 .suite = { 2437 .cprng = __VECS(ansi_cprng_aes_tv_template) 2438 } 2439 }, { 2440 .alg = "authenc(hmac(md5),ecb(cipher_null))", 2441 .test = alg_test_aead, 2442 .suite = { 2443 .aead = { 2444 .enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template), 2445 .dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template) 2446 } 2447 } 2448 }, { 2449 .alg = "authenc(hmac(sha1),cbc(aes))", 2450 .test = alg_test_aead, 2451 .fips_allowed = 1, 2452 .suite = { 2453 .aead = { 2454 .enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp) 2455 } 2456 } 2457 }, { 2458 .alg = "authenc(hmac(sha1),cbc(des))", 2459 .test = alg_test_aead, 2460 .suite = { 2461 .aead = { 2462 .enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp) 2463 } 2464 } 2465 }, { 2466 .alg = "authenc(hmac(sha1),cbc(des3_ede))", 2467 .test = alg_test_aead, 2468 .fips_allowed = 1, 2469 .suite = { 2470 .aead = { 2471 .enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp) 2472 } 2473 } 2474 }, { 2475 .alg = "authenc(hmac(sha1),ctr(aes))", 2476 .test = alg_test_null, 2477 .fips_allowed = 1, 2478 }, { 2479 .alg = "authenc(hmac(sha1),ecb(cipher_null))", 2480 .test = alg_test_aead, 2481 .suite = { 2482 .aead = { 2483 .enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp), 2484 .dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp) 2485 } 2486 } 2487 }, { 2488 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))", 2489 .test = alg_test_null, 2490 .fips_allowed = 1, 2491 }, { 2492 .alg = "authenc(hmac(sha224),cbc(des))", 2493 .test = alg_test_aead, 2494 .suite = { 2495 .aead = { 2496 .enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp) 2497 } 2498 } 2499 }, { 2500 .alg = "authenc(hmac(sha224),cbc(des3_ede))", 2501 .test = alg_test_aead, 2502 .fips_allowed = 1, 2503 .suite = { 2504 .aead = { 2505 .enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp) 2506 } 2507 } 2508 }, { 2509 .alg = "authenc(hmac(sha256),cbc(aes))", 2510 .test = alg_test_aead, 2511 .fips_allowed = 1, 2512 .suite = { 2513 .aead = { 2514 .enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp) 2515 } 2516 } 2517 }, { 2518 .alg = "authenc(hmac(sha256),cbc(des))", 2519 .test = alg_test_aead, 2520 .suite = { 2521 .aead = { 2522 .enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp) 2523 } 2524 } 2525 }, { 2526 .alg = "authenc(hmac(sha256),cbc(des3_ede))", 2527 .test = alg_test_aead, 2528 .fips_allowed = 1, 2529 .suite = { 2530 .aead = { 2531 .enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp) 2532 } 2533 } 2534 }, { 2535 .alg = "authenc(hmac(sha256),ctr(aes))", 2536 .test = alg_test_null, 2537 .fips_allowed = 1, 2538 }, { 2539 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))", 2540 .test = alg_test_null, 2541 .fips_allowed = 1, 2542 }, { 2543 .alg = "authenc(hmac(sha384),cbc(des))", 2544 .test = alg_test_aead, 2545 .suite = { 2546 .aead = { 2547 .enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp) 2548 } 2549 } 2550 }, { 2551 .alg = "authenc(hmac(sha384),cbc(des3_ede))", 2552 .test = alg_test_aead, 2553 .fips_allowed = 1, 2554 .suite = { 2555 .aead = { 2556 .enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp) 2557 } 2558 } 2559 }, { 2560 .alg = "authenc(hmac(sha384),ctr(aes))", 2561 .test = alg_test_null, 2562 .fips_allowed = 1, 2563 }, { 2564 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))", 2565 .test = alg_test_null, 2566 .fips_allowed = 1, 2567 }, { 2568 .alg = "authenc(hmac(sha512),cbc(aes))", 2569 .fips_allowed = 1, 2570 .test = alg_test_aead, 2571 .suite = { 2572 .aead = { 2573 .enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp) 2574 } 2575 } 2576 }, { 2577 .alg = "authenc(hmac(sha512),cbc(des))", 2578 .test = alg_test_aead, 2579 .suite = { 2580 .aead = { 2581 .enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp) 2582 } 2583 } 2584 }, { 2585 .alg = "authenc(hmac(sha512),cbc(des3_ede))", 2586 .test = alg_test_aead, 2587 .fips_allowed = 1, 2588 .suite = { 2589 .aead = { 2590 .enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp) 2591 } 2592 } 2593 }, { 2594 .alg = "authenc(hmac(sha512),ctr(aes))", 2595 .test = alg_test_null, 2596 .fips_allowed = 1, 2597 }, { 2598 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))", 2599 .test = alg_test_null, 2600 .fips_allowed = 1, 2601 }, { 2602 .alg = "cbc(aes)", 2603 .test = alg_test_skcipher, 2604 .fips_allowed = 1, 2605 .suite = { 2606 .cipher = __VECS(aes_cbc_tv_template) 2607 }, 2608 }, { 2609 .alg = "cbc(anubis)", 2610 .test = alg_test_skcipher, 2611 .suite = { 2612 .cipher = __VECS(anubis_cbc_tv_template) 2613 }, 2614 }, { 2615 .alg = "cbc(blowfish)", 2616 .test = alg_test_skcipher, 2617 .suite = { 2618 .cipher = __VECS(bf_cbc_tv_template) 2619 }, 2620 }, { 2621 .alg = "cbc(camellia)", 2622 .test = alg_test_skcipher, 2623 .suite = { 2624 .cipher = __VECS(camellia_cbc_tv_template) 2625 }, 2626 }, { 2627 .alg = "cbc(cast5)", 2628 .test = alg_test_skcipher, 2629 .suite = { 2630 .cipher = __VECS(cast5_cbc_tv_template) 2631 }, 2632 }, { 2633 .alg = "cbc(cast6)", 2634 .test = alg_test_skcipher, 2635 .suite = { 2636 .cipher = __VECS(cast6_cbc_tv_template) 2637 }, 2638 }, { 2639 .alg = "cbc(des)", 2640 .test = alg_test_skcipher, 2641 .suite = { 2642 .cipher = __VECS(des_cbc_tv_template) 2643 }, 2644 }, { 2645 .alg = "cbc(des3_ede)", 2646 .test = alg_test_skcipher, 2647 .fips_allowed = 1, 2648 .suite = { 2649 .cipher = __VECS(des3_ede_cbc_tv_template) 2650 }, 2651 }, { 2652 /* Same as cbc(aes) except the key is stored in 2653 * hardware secure memory which we reference by index 2654 */ 2655 .alg = "cbc(paes)", 2656 .test = alg_test_null, 2657 .fips_allowed = 1, 2658 }, { 2659 .alg = "cbc(serpent)", 2660 .test = alg_test_skcipher, 2661 .suite = { 2662 .cipher = __VECS(serpent_cbc_tv_template) 2663 }, 2664 }, { 2665 .alg = "cbc(twofish)", 2666 .test = alg_test_skcipher, 2667 .suite = { 2668 .cipher = __VECS(tf_cbc_tv_template) 2669 }, 2670 }, { 2671 .alg = "cbcmac(aes)", 2672 .fips_allowed = 1, 2673 .test = alg_test_hash, 2674 .suite = { 2675 .hash = __VECS(aes_cbcmac_tv_template) 2676 } 2677 }, { 2678 .alg = "ccm(aes)", 2679 .test = alg_test_aead, 2680 .fips_allowed = 1, 2681 .suite = { 2682 .aead = { 2683 .enc = __VECS(aes_ccm_enc_tv_template), 2684 .dec = __VECS(aes_ccm_dec_tv_template) 2685 } 2686 } 2687 }, { 2688 .alg = "chacha20", 2689 .test = alg_test_skcipher, 2690 .suite = { 2691 .cipher = __VECS(chacha20_tv_template) 2692 }, 2693 }, { 2694 .alg = "cmac(aes)", 2695 .fips_allowed = 1, 2696 .test = alg_test_hash, 2697 .suite = { 2698 .hash = __VECS(aes_cmac128_tv_template) 2699 } 2700 }, { 2701 .alg = "cmac(des3_ede)", 2702 .fips_allowed = 1, 2703 .test = alg_test_hash, 2704 .suite = { 2705 .hash = __VECS(des3_ede_cmac64_tv_template) 2706 } 2707 }, { 2708 .alg = "compress_null", 2709 .test = alg_test_null, 2710 }, { 2711 .alg = "crc32", 2712 .test = alg_test_hash, 2713 .suite = { 2714 .hash = __VECS(crc32_tv_template) 2715 } 2716 }, { 2717 .alg = "crc32c", 2718 .test = alg_test_crc32c, 2719 .fips_allowed = 1, 2720 .suite = { 2721 .hash = __VECS(crc32c_tv_template) 2722 } 2723 }, { 2724 .alg = "crct10dif", 2725 .test = alg_test_hash, 2726 .fips_allowed = 1, 2727 .suite = { 2728 .hash = __VECS(crct10dif_tv_template) 2729 } 2730 }, { 2731 .alg = "ctr(aes)", 2732 .test = alg_test_skcipher, 2733 .fips_allowed = 1, 2734 .suite = { 2735 .cipher = __VECS(aes_ctr_tv_template) 2736 } 2737 }, { 2738 .alg = "ctr(blowfish)", 2739 .test = alg_test_skcipher, 2740 .suite = { 2741 .cipher = __VECS(bf_ctr_tv_template) 2742 } 2743 }, { 2744 .alg = "ctr(camellia)", 2745 .test = alg_test_skcipher, 2746 .suite = { 2747 .cipher = __VECS(camellia_ctr_tv_template) 2748 } 2749 }, { 2750 .alg = "ctr(cast5)", 2751 .test = alg_test_skcipher, 2752 .suite = { 2753 .cipher = __VECS(cast5_ctr_tv_template) 2754 } 2755 }, { 2756 .alg = "ctr(cast6)", 2757 .test = alg_test_skcipher, 2758 .suite = { 2759 .cipher = __VECS(cast6_ctr_tv_template) 2760 } 2761 }, { 2762 .alg = "ctr(des)", 2763 .test = alg_test_skcipher, 2764 .suite = { 2765 .cipher = __VECS(des_ctr_tv_template) 2766 } 2767 }, { 2768 .alg = "ctr(des3_ede)", 2769 .test = alg_test_skcipher, 2770 .fips_allowed = 1, 2771 .suite = { 2772 .cipher = __VECS(des3_ede_ctr_tv_template) 2773 } 2774 }, { 2775 /* Same as ctr(aes) except the key is stored in 2776 * hardware secure memory which we reference by index 2777 */ 2778 .alg = "ctr(paes)", 2779 .test = alg_test_null, 2780 .fips_allowed = 1, 2781 }, { 2782 .alg = "ctr(serpent)", 2783 .test = alg_test_skcipher, 2784 .suite = { 2785 .cipher = __VECS(serpent_ctr_tv_template) 2786 } 2787 }, { 2788 .alg = "ctr(twofish)", 2789 .test = alg_test_skcipher, 2790 .suite = { 2791 .cipher = __VECS(tf_ctr_tv_template) 2792 } 2793 }, { 2794 .alg = "cts(cbc(aes))", 2795 .test = alg_test_skcipher, 2796 .suite = { 2797 .cipher = __VECS(cts_mode_tv_template) 2798 } 2799 }, { 2800 .alg = "deflate", 2801 .test = alg_test_comp, 2802 .fips_allowed = 1, 2803 .suite = { 2804 .comp = { 2805 .comp = __VECS(deflate_comp_tv_template), 2806 .decomp = __VECS(deflate_decomp_tv_template) 2807 } 2808 } 2809 }, { 2810 .alg = "dh", 2811 .test = alg_test_kpp, 2812 .fips_allowed = 1, 2813 .suite = { 2814 .kpp = __VECS(dh_tv_template) 2815 } 2816 }, { 2817 .alg = "digest_null", 2818 .test = alg_test_null, 2819 }, { 2820 .alg = "drbg_nopr_ctr_aes128", 2821 .test = alg_test_drbg, 2822 .fips_allowed = 1, 2823 .suite = { 2824 .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template) 2825 } 2826 }, { 2827 .alg = "drbg_nopr_ctr_aes192", 2828 .test = alg_test_drbg, 2829 .fips_allowed = 1, 2830 .suite = { 2831 .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template) 2832 } 2833 }, { 2834 .alg = "drbg_nopr_ctr_aes256", 2835 .test = alg_test_drbg, 2836 .fips_allowed = 1, 2837 .suite = { 2838 .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template) 2839 } 2840 }, { 2841 /* 2842 * There is no need to specifically test the DRBG with every 2843 * backend cipher -- covered by drbg_nopr_hmac_sha256 test 2844 */ 2845 .alg = "drbg_nopr_hmac_sha1", 2846 .fips_allowed = 1, 2847 .test = alg_test_null, 2848 }, { 2849 .alg = "drbg_nopr_hmac_sha256", 2850 .test = alg_test_drbg, 2851 .fips_allowed = 1, 2852 .suite = { 2853 .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template) 2854 } 2855 }, { 2856 /* covered by drbg_nopr_hmac_sha256 test */ 2857 .alg = "drbg_nopr_hmac_sha384", 2858 .fips_allowed = 1, 2859 .test = alg_test_null, 2860 }, { 2861 .alg = "drbg_nopr_hmac_sha512", 2862 .test = alg_test_null, 2863 .fips_allowed = 1, 2864 }, { 2865 .alg = "drbg_nopr_sha1", 2866 .fips_allowed = 1, 2867 .test = alg_test_null, 2868 }, { 2869 .alg = "drbg_nopr_sha256", 2870 .test = alg_test_drbg, 2871 .fips_allowed = 1, 2872 .suite = { 2873 .drbg = __VECS(drbg_nopr_sha256_tv_template) 2874 } 2875 }, { 2876 /* covered by drbg_nopr_sha256 test */ 2877 .alg = "drbg_nopr_sha384", 2878 .fips_allowed = 1, 2879 .test = alg_test_null, 2880 }, { 2881 .alg = "drbg_nopr_sha512", 2882 .fips_allowed = 1, 2883 .test = alg_test_null, 2884 }, { 2885 .alg = "drbg_pr_ctr_aes128", 2886 .test = alg_test_drbg, 2887 .fips_allowed = 1, 2888 .suite = { 2889 .drbg = __VECS(drbg_pr_ctr_aes128_tv_template) 2890 } 2891 }, { 2892 /* covered by drbg_pr_ctr_aes128 test */ 2893 .alg = "drbg_pr_ctr_aes192", 2894 .fips_allowed = 1, 2895 .test = alg_test_null, 2896 }, { 2897 .alg = "drbg_pr_ctr_aes256", 2898 .fips_allowed = 1, 2899 .test = alg_test_null, 2900 }, { 2901 .alg = "drbg_pr_hmac_sha1", 2902 .fips_allowed = 1, 2903 .test = alg_test_null, 2904 }, { 2905 .alg = "drbg_pr_hmac_sha256", 2906 .test = alg_test_drbg, 2907 .fips_allowed = 1, 2908 .suite = { 2909 .drbg = __VECS(drbg_pr_hmac_sha256_tv_template) 2910 } 2911 }, { 2912 /* covered by drbg_pr_hmac_sha256 test */ 2913 .alg = "drbg_pr_hmac_sha384", 2914 .fips_allowed = 1, 2915 .test = alg_test_null, 2916 }, { 2917 .alg = "drbg_pr_hmac_sha512", 2918 .test = alg_test_null, 2919 .fips_allowed = 1, 2920 }, { 2921 .alg = "drbg_pr_sha1", 2922 .fips_allowed = 1, 2923 .test = alg_test_null, 2924 }, { 2925 .alg = "drbg_pr_sha256", 2926 .test = alg_test_drbg, 2927 .fips_allowed = 1, 2928 .suite = { 2929 .drbg = __VECS(drbg_pr_sha256_tv_template) 2930 } 2931 }, { 2932 /* covered by drbg_pr_sha256 test */ 2933 .alg = "drbg_pr_sha384", 2934 .fips_allowed = 1, 2935 .test = alg_test_null, 2936 }, { 2937 .alg = "drbg_pr_sha512", 2938 .fips_allowed = 1, 2939 .test = alg_test_null, 2940 }, { 2941 .alg = "ecb(aes)", 2942 .test = alg_test_skcipher, 2943 .fips_allowed = 1, 2944 .suite = { 2945 .cipher = __VECS(aes_tv_template) 2946 } 2947 }, { 2948 .alg = "ecb(anubis)", 2949 .test = alg_test_skcipher, 2950 .suite = { 2951 .cipher = __VECS(anubis_tv_template) 2952 } 2953 }, { 2954 .alg = "ecb(arc4)", 2955 .test = alg_test_skcipher, 2956 .suite = { 2957 .cipher = __VECS(arc4_tv_template) 2958 } 2959 }, { 2960 .alg = "ecb(blowfish)", 2961 .test = alg_test_skcipher, 2962 .suite = { 2963 .cipher = __VECS(bf_tv_template) 2964 } 2965 }, { 2966 .alg = "ecb(camellia)", 2967 .test = alg_test_skcipher, 2968 .suite = { 2969 .cipher = __VECS(camellia_tv_template) 2970 } 2971 }, { 2972 .alg = "ecb(cast5)", 2973 .test = alg_test_skcipher, 2974 .suite = { 2975 .cipher = __VECS(cast5_tv_template) 2976 } 2977 }, { 2978 .alg = "ecb(cast6)", 2979 .test = alg_test_skcipher, 2980 .suite = { 2981 .cipher = __VECS(cast6_tv_template) 2982 } 2983 }, { 2984 .alg = "ecb(cipher_null)", 2985 .test = alg_test_null, 2986 .fips_allowed = 1, 2987 }, { 2988 .alg = "ecb(des)", 2989 .test = alg_test_skcipher, 2990 .suite = { 2991 .cipher = __VECS(des_tv_template) 2992 } 2993 }, { 2994 .alg = "ecb(des3_ede)", 2995 .test = alg_test_skcipher, 2996 .fips_allowed = 1, 2997 .suite = { 2998 .cipher = __VECS(des3_ede_tv_template) 2999 } 3000 }, { 3001 .alg = "ecb(fcrypt)", 3002 .test = alg_test_skcipher, 3003 .suite = { 3004 .cipher = { 3005 .vecs = fcrypt_pcbc_tv_template, 3006 .count = 1 3007 } 3008 } 3009 }, { 3010 .alg = "ecb(khazad)", 3011 .test = alg_test_skcipher, 3012 .suite = { 3013 .cipher = __VECS(khazad_tv_template) 3014 } 3015 }, { 3016 /* Same as ecb(aes) except the key is stored in 3017 * hardware secure memory which we reference by index 3018 */ 3019 .alg = "ecb(paes)", 3020 .test = alg_test_null, 3021 .fips_allowed = 1, 3022 }, { 3023 .alg = "ecb(seed)", 3024 .test = alg_test_skcipher, 3025 .suite = { 3026 .cipher = __VECS(seed_tv_template) 3027 } 3028 }, { 3029 .alg = "ecb(serpent)", 3030 .test = alg_test_skcipher, 3031 .suite = { 3032 .cipher = __VECS(serpent_tv_template) 3033 } 3034 }, { 3035 .alg = "ecb(sm4)", 3036 .test = alg_test_skcipher, 3037 .suite = { 3038 .cipher = __VECS(sm4_tv_template) 3039 } 3040 }, { 3041 .alg = "ecb(speck128)", 3042 .test = alg_test_skcipher, 3043 .suite = { 3044 .cipher = __VECS(speck128_tv_template) 3045 } 3046 }, { 3047 .alg = "ecb(speck64)", 3048 .test = alg_test_skcipher, 3049 .suite = { 3050 .cipher = __VECS(speck64_tv_template) 3051 } 3052 }, { 3053 .alg = "ecb(tea)", 3054 .test = alg_test_skcipher, 3055 .suite = { 3056 .cipher = __VECS(tea_tv_template) 3057 } 3058 }, { 3059 .alg = "ecb(tnepres)", 3060 .test = alg_test_skcipher, 3061 .suite = { 3062 .cipher = __VECS(tnepres_tv_template) 3063 } 3064 }, { 3065 .alg = "ecb(twofish)", 3066 .test = alg_test_skcipher, 3067 .suite = { 3068 .cipher = __VECS(tf_tv_template) 3069 } 3070 }, { 3071 .alg = "ecb(xeta)", 3072 .test = alg_test_skcipher, 3073 .suite = { 3074 .cipher = __VECS(xeta_tv_template) 3075 } 3076 }, { 3077 .alg = "ecb(xtea)", 3078 .test = alg_test_skcipher, 3079 .suite = { 3080 .cipher = __VECS(xtea_tv_template) 3081 } 3082 }, { 3083 .alg = "ecdh", 3084 .test = alg_test_kpp, 3085 .fips_allowed = 1, 3086 .suite = { 3087 .kpp = __VECS(ecdh_tv_template) 3088 } 3089 }, { 3090 .alg = "gcm(aes)", 3091 .test = alg_test_aead, 3092 .fips_allowed = 1, 3093 .suite = { 3094 .aead = { 3095 .enc = __VECS(aes_gcm_enc_tv_template), 3096 .dec = __VECS(aes_gcm_dec_tv_template) 3097 } 3098 } 3099 }, { 3100 .alg = "ghash", 3101 .test = alg_test_hash, 3102 .fips_allowed = 1, 3103 .suite = { 3104 .hash = __VECS(ghash_tv_template) 3105 } 3106 }, { 3107 .alg = "hmac(md5)", 3108 .test = alg_test_hash, 3109 .suite = { 3110 .hash = __VECS(hmac_md5_tv_template) 3111 } 3112 }, { 3113 .alg = "hmac(rmd128)", 3114 .test = alg_test_hash, 3115 .suite = { 3116 .hash = __VECS(hmac_rmd128_tv_template) 3117 } 3118 }, { 3119 .alg = "hmac(rmd160)", 3120 .test = alg_test_hash, 3121 .suite = { 3122 .hash = __VECS(hmac_rmd160_tv_template) 3123 } 3124 }, { 3125 .alg = "hmac(sha1)", 3126 .test = alg_test_hash, 3127 .fips_allowed = 1, 3128 .suite = { 3129 .hash = __VECS(hmac_sha1_tv_template) 3130 } 3131 }, { 3132 .alg = "hmac(sha224)", 3133 .test = alg_test_hash, 3134 .fips_allowed = 1, 3135 .suite = { 3136 .hash = __VECS(hmac_sha224_tv_template) 3137 } 3138 }, { 3139 .alg = "hmac(sha256)", 3140 .test = alg_test_hash, 3141 .fips_allowed = 1, 3142 .suite = { 3143 .hash = __VECS(hmac_sha256_tv_template) 3144 } 3145 }, { 3146 .alg = "hmac(sha3-224)", 3147 .test = alg_test_hash, 3148 .fips_allowed = 1, 3149 .suite = { 3150 .hash = __VECS(hmac_sha3_224_tv_template) 3151 } 3152 }, { 3153 .alg = "hmac(sha3-256)", 3154 .test = alg_test_hash, 3155 .fips_allowed = 1, 3156 .suite = { 3157 .hash = __VECS(hmac_sha3_256_tv_template) 3158 } 3159 }, { 3160 .alg = "hmac(sha3-384)", 3161 .test = alg_test_hash, 3162 .fips_allowed = 1, 3163 .suite = { 3164 .hash = __VECS(hmac_sha3_384_tv_template) 3165 } 3166 }, { 3167 .alg = "hmac(sha3-512)", 3168 .test = alg_test_hash, 3169 .fips_allowed = 1, 3170 .suite = { 3171 .hash = __VECS(hmac_sha3_512_tv_template) 3172 } 3173 }, { 3174 .alg = "hmac(sha384)", 3175 .test = alg_test_hash, 3176 .fips_allowed = 1, 3177 .suite = { 3178 .hash = __VECS(hmac_sha384_tv_template) 3179 } 3180 }, { 3181 .alg = "hmac(sha512)", 3182 .test = alg_test_hash, 3183 .fips_allowed = 1, 3184 .suite = { 3185 .hash = __VECS(hmac_sha512_tv_template) 3186 } 3187 }, { 3188 .alg = "jitterentropy_rng", 3189 .fips_allowed = 1, 3190 .test = alg_test_null, 3191 }, { 3192 .alg = "kw(aes)", 3193 .test = alg_test_skcipher, 3194 .fips_allowed = 1, 3195 .suite = { 3196 .cipher = __VECS(aes_kw_tv_template) 3197 } 3198 }, { 3199 .alg = "lrw(aes)", 3200 .test = alg_test_skcipher, 3201 .suite = { 3202 .cipher = __VECS(aes_lrw_tv_template) 3203 } 3204 }, { 3205 .alg = "lrw(camellia)", 3206 .test = alg_test_skcipher, 3207 .suite = { 3208 .cipher = __VECS(camellia_lrw_tv_template) 3209 } 3210 }, { 3211 .alg = "lrw(cast6)", 3212 .test = alg_test_skcipher, 3213 .suite = { 3214 .cipher = __VECS(cast6_lrw_tv_template) 3215 } 3216 }, { 3217 .alg = "lrw(serpent)", 3218 .test = alg_test_skcipher, 3219 .suite = { 3220 .cipher = __VECS(serpent_lrw_tv_template) 3221 } 3222 }, { 3223 .alg = "lrw(twofish)", 3224 .test = alg_test_skcipher, 3225 .suite = { 3226 .cipher = __VECS(tf_lrw_tv_template) 3227 } 3228 }, { 3229 .alg = "lz4", 3230 .test = alg_test_comp, 3231 .fips_allowed = 1, 3232 .suite = { 3233 .comp = { 3234 .comp = __VECS(lz4_comp_tv_template), 3235 .decomp = __VECS(lz4_decomp_tv_template) 3236 } 3237 } 3238 }, { 3239 .alg = "lz4hc", 3240 .test = alg_test_comp, 3241 .fips_allowed = 1, 3242 .suite = { 3243 .comp = { 3244 .comp = __VECS(lz4hc_comp_tv_template), 3245 .decomp = __VECS(lz4hc_decomp_tv_template) 3246 } 3247 } 3248 }, { 3249 .alg = "lzo", 3250 .test = alg_test_comp, 3251 .fips_allowed = 1, 3252 .suite = { 3253 .comp = { 3254 .comp = __VECS(lzo_comp_tv_template), 3255 .decomp = __VECS(lzo_decomp_tv_template) 3256 } 3257 } 3258 }, { 3259 .alg = "md4", 3260 .test = alg_test_hash, 3261 .suite = { 3262 .hash = __VECS(md4_tv_template) 3263 } 3264 }, { 3265 .alg = "md5", 3266 .test = alg_test_hash, 3267 .suite = { 3268 .hash = __VECS(md5_tv_template) 3269 } 3270 }, { 3271 .alg = "michael_mic", 3272 .test = alg_test_hash, 3273 .suite = { 3274 .hash = __VECS(michael_mic_tv_template) 3275 } 3276 }, { 3277 .alg = "morus1280", 3278 .test = alg_test_aead, 3279 .suite = { 3280 .aead = { 3281 .enc = __VECS(morus1280_enc_tv_template), 3282 .dec = __VECS(morus1280_dec_tv_template), 3283 } 3284 } 3285 }, { 3286 .alg = "morus640", 3287 .test = alg_test_aead, 3288 .suite = { 3289 .aead = { 3290 .enc = __VECS(morus640_enc_tv_template), 3291 .dec = __VECS(morus640_dec_tv_template), 3292 } 3293 } 3294 }, { 3295 .alg = "ofb(aes)", 3296 .test = alg_test_skcipher, 3297 .fips_allowed = 1, 3298 .suite = { 3299 .cipher = __VECS(aes_ofb_tv_template) 3300 } 3301 }, { 3302 /* Same as ofb(aes) except the key is stored in 3303 * hardware secure memory which we reference by index 3304 */ 3305 .alg = "ofb(paes)", 3306 .test = alg_test_null, 3307 .fips_allowed = 1, 3308 }, { 3309 .alg = "pcbc(fcrypt)", 3310 .test = alg_test_skcipher, 3311 .suite = { 3312 .cipher = __VECS(fcrypt_pcbc_tv_template) 3313 } 3314 }, { 3315 .alg = "pkcs1pad(rsa,sha224)", 3316 .test = alg_test_null, 3317 .fips_allowed = 1, 3318 }, { 3319 .alg = "pkcs1pad(rsa,sha256)", 3320 .test = alg_test_akcipher, 3321 .fips_allowed = 1, 3322 .suite = { 3323 .akcipher = __VECS(pkcs1pad_rsa_tv_template) 3324 } 3325 }, { 3326 .alg = "pkcs1pad(rsa,sha384)", 3327 .test = alg_test_null, 3328 .fips_allowed = 1, 3329 }, { 3330 .alg = "pkcs1pad(rsa,sha512)", 3331 .test = alg_test_null, 3332 .fips_allowed = 1, 3333 }, { 3334 .alg = "poly1305", 3335 .test = alg_test_hash, 3336 .suite = { 3337 .hash = __VECS(poly1305_tv_template) 3338 } 3339 }, { 3340 .alg = "rfc3686(ctr(aes))", 3341 .test = alg_test_skcipher, 3342 .fips_allowed = 1, 3343 .suite = { 3344 .cipher = __VECS(aes_ctr_rfc3686_tv_template) 3345 } 3346 }, { 3347 .alg = "rfc4106(gcm(aes))", 3348 .test = alg_test_aead, 3349 .fips_allowed = 1, 3350 .suite = { 3351 .aead = { 3352 .enc = __VECS(aes_gcm_rfc4106_enc_tv_template), 3353 .dec = __VECS(aes_gcm_rfc4106_dec_tv_template) 3354 } 3355 } 3356 }, { 3357 .alg = "rfc4309(ccm(aes))", 3358 .test = alg_test_aead, 3359 .fips_allowed = 1, 3360 .suite = { 3361 .aead = { 3362 .enc = __VECS(aes_ccm_rfc4309_enc_tv_template), 3363 .dec = __VECS(aes_ccm_rfc4309_dec_tv_template) 3364 } 3365 } 3366 }, { 3367 .alg = "rfc4543(gcm(aes))", 3368 .test = alg_test_aead, 3369 .suite = { 3370 .aead = { 3371 .enc = __VECS(aes_gcm_rfc4543_enc_tv_template), 3372 .dec = __VECS(aes_gcm_rfc4543_dec_tv_template), 3373 } 3374 } 3375 }, { 3376 .alg = "rfc7539(chacha20,poly1305)", 3377 .test = alg_test_aead, 3378 .suite = { 3379 .aead = { 3380 .enc = __VECS(rfc7539_enc_tv_template), 3381 .dec = __VECS(rfc7539_dec_tv_template), 3382 } 3383 } 3384 }, { 3385 .alg = "rfc7539esp(chacha20,poly1305)", 3386 .test = alg_test_aead, 3387 .suite = { 3388 .aead = { 3389 .enc = __VECS(rfc7539esp_enc_tv_template), 3390 .dec = __VECS(rfc7539esp_dec_tv_template), 3391 } 3392 } 3393 }, { 3394 .alg = "rmd128", 3395 .test = alg_test_hash, 3396 .suite = { 3397 .hash = __VECS(rmd128_tv_template) 3398 } 3399 }, { 3400 .alg = "rmd160", 3401 .test = alg_test_hash, 3402 .suite = { 3403 .hash = __VECS(rmd160_tv_template) 3404 } 3405 }, { 3406 .alg = "rmd256", 3407 .test = alg_test_hash, 3408 .suite = { 3409 .hash = __VECS(rmd256_tv_template) 3410 } 3411 }, { 3412 .alg = "rmd320", 3413 .test = alg_test_hash, 3414 .suite = { 3415 .hash = __VECS(rmd320_tv_template) 3416 } 3417 }, { 3418 .alg = "rsa", 3419 .test = alg_test_akcipher, 3420 .fips_allowed = 1, 3421 .suite = { 3422 .akcipher = __VECS(rsa_tv_template) 3423 } 3424 }, { 3425 .alg = "salsa20", 3426 .test = alg_test_skcipher, 3427 .suite = { 3428 .cipher = __VECS(salsa20_stream_tv_template) 3429 } 3430 }, { 3431 .alg = "sha1", 3432 .test = alg_test_hash, 3433 .fips_allowed = 1, 3434 .suite = { 3435 .hash = __VECS(sha1_tv_template) 3436 } 3437 }, { 3438 .alg = "sha224", 3439 .test = alg_test_hash, 3440 .fips_allowed = 1, 3441 .suite = { 3442 .hash = __VECS(sha224_tv_template) 3443 } 3444 }, { 3445 .alg = "sha256", 3446 .test = alg_test_hash, 3447 .fips_allowed = 1, 3448 .suite = { 3449 .hash = __VECS(sha256_tv_template) 3450 } 3451 }, { 3452 .alg = "sha3-224", 3453 .test = alg_test_hash, 3454 .fips_allowed = 1, 3455 .suite = { 3456 .hash = __VECS(sha3_224_tv_template) 3457 } 3458 }, { 3459 .alg = "sha3-256", 3460 .test = alg_test_hash, 3461 .fips_allowed = 1, 3462 .suite = { 3463 .hash = __VECS(sha3_256_tv_template) 3464 } 3465 }, { 3466 .alg = "sha3-384", 3467 .test = alg_test_hash, 3468 .fips_allowed = 1, 3469 .suite = { 3470 .hash = __VECS(sha3_384_tv_template) 3471 } 3472 }, { 3473 .alg = "sha3-512", 3474 .test = alg_test_hash, 3475 .fips_allowed = 1, 3476 .suite = { 3477 .hash = __VECS(sha3_512_tv_template) 3478 } 3479 }, { 3480 .alg = "sha384", 3481 .test = alg_test_hash, 3482 .fips_allowed = 1, 3483 .suite = { 3484 .hash = __VECS(sha384_tv_template) 3485 } 3486 }, { 3487 .alg = "sha512", 3488 .test = alg_test_hash, 3489 .fips_allowed = 1, 3490 .suite = { 3491 .hash = __VECS(sha512_tv_template) 3492 } 3493 }, { 3494 .alg = "sm3", 3495 .test = alg_test_hash, 3496 .suite = { 3497 .hash = __VECS(sm3_tv_template) 3498 } 3499 }, { 3500 .alg = "tgr128", 3501 .test = alg_test_hash, 3502 .suite = { 3503 .hash = __VECS(tgr128_tv_template) 3504 } 3505 }, { 3506 .alg = "tgr160", 3507 .test = alg_test_hash, 3508 .suite = { 3509 .hash = __VECS(tgr160_tv_template) 3510 } 3511 }, { 3512 .alg = "tgr192", 3513 .test = alg_test_hash, 3514 .suite = { 3515 .hash = __VECS(tgr192_tv_template) 3516 } 3517 }, { 3518 .alg = "vmac64(aes)", 3519 .test = alg_test_hash, 3520 .suite = { 3521 .hash = __VECS(vmac64_aes_tv_template) 3522 } 3523 }, { 3524 .alg = "wp256", 3525 .test = alg_test_hash, 3526 .suite = { 3527 .hash = __VECS(wp256_tv_template) 3528 } 3529 }, { 3530 .alg = "wp384", 3531 .test = alg_test_hash, 3532 .suite = { 3533 .hash = __VECS(wp384_tv_template) 3534 } 3535 }, { 3536 .alg = "wp512", 3537 .test = alg_test_hash, 3538 .suite = { 3539 .hash = __VECS(wp512_tv_template) 3540 } 3541 }, { 3542 .alg = "xcbc(aes)", 3543 .test = alg_test_hash, 3544 .suite = { 3545 .hash = __VECS(aes_xcbc128_tv_template) 3546 } 3547 }, { 3548 .alg = "xts(aes)", 3549 .test = alg_test_skcipher, 3550 .fips_allowed = 1, 3551 .suite = { 3552 .cipher = __VECS(aes_xts_tv_template) 3553 } 3554 }, { 3555 .alg = "xts(camellia)", 3556 .test = alg_test_skcipher, 3557 .suite = { 3558 .cipher = __VECS(camellia_xts_tv_template) 3559 } 3560 }, { 3561 .alg = "xts(cast6)", 3562 .test = alg_test_skcipher, 3563 .suite = { 3564 .cipher = __VECS(cast6_xts_tv_template) 3565 } 3566 }, { 3567 /* Same as xts(aes) except the key is stored in 3568 * hardware secure memory which we reference by index 3569 */ 3570 .alg = "xts(paes)", 3571 .test = alg_test_null, 3572 .fips_allowed = 1, 3573 }, { 3574 .alg = "xts(serpent)", 3575 .test = alg_test_skcipher, 3576 .suite = { 3577 .cipher = __VECS(serpent_xts_tv_template) 3578 } 3579 }, { 3580 .alg = "xts(speck128)", 3581 .test = alg_test_skcipher, 3582 .suite = { 3583 .cipher = __VECS(speck128_xts_tv_template) 3584 } 3585 }, { 3586 .alg = "xts(speck64)", 3587 .test = alg_test_skcipher, 3588 .suite = { 3589 .cipher = __VECS(speck64_xts_tv_template) 3590 } 3591 }, { 3592 .alg = "xts(twofish)", 3593 .test = alg_test_skcipher, 3594 .suite = { 3595 .cipher = __VECS(tf_xts_tv_template) 3596 } 3597 }, { 3598 .alg = "xts4096(paes)", 3599 .test = alg_test_null, 3600 .fips_allowed = 1, 3601 }, { 3602 .alg = "xts512(paes)", 3603 .test = alg_test_null, 3604 .fips_allowed = 1, 3605 }, { 3606 .alg = "zlib-deflate", 3607 .test = alg_test_comp, 3608 .fips_allowed = 1, 3609 .suite = { 3610 .comp = { 3611 .comp = __VECS(zlib_deflate_comp_tv_template), 3612 .decomp = __VECS(zlib_deflate_decomp_tv_template) 3613 } 3614 } 3615 }, { 3616 .alg = "zstd", 3617 .test = alg_test_comp, 3618 .fips_allowed = 1, 3619 .suite = { 3620 .comp = { 3621 .comp = __VECS(zstd_comp_tv_template), 3622 .decomp = __VECS(zstd_decomp_tv_template) 3623 } 3624 } 3625 } 3626 }; 3627 3628 static bool alg_test_descs_checked; 3629 3630 static void alg_test_descs_check_order(void) 3631 { 3632 int i; 3633 3634 /* only check once */ 3635 if (alg_test_descs_checked) 3636 return; 3637 3638 alg_test_descs_checked = true; 3639 3640 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) { 3641 int diff = strcmp(alg_test_descs[i - 1].alg, 3642 alg_test_descs[i].alg); 3643 3644 if (WARN_ON(diff > 0)) { 3645 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n", 3646 alg_test_descs[i - 1].alg, 3647 alg_test_descs[i].alg); 3648 } 3649 3650 if (WARN_ON(diff == 0)) { 3651 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n", 3652 alg_test_descs[i].alg); 3653 } 3654 } 3655 } 3656 3657 static int alg_find_test(const char *alg) 3658 { 3659 int start = 0; 3660 int end = ARRAY_SIZE(alg_test_descs); 3661 3662 while (start < end) { 3663 int i = (start + end) / 2; 3664 int diff = strcmp(alg_test_descs[i].alg, alg); 3665 3666 if (diff > 0) { 3667 end = i; 3668 continue; 3669 } 3670 3671 if (diff < 0) { 3672 start = i + 1; 3673 continue; 3674 } 3675 3676 return i; 3677 } 3678 3679 return -1; 3680 } 3681 3682 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 3683 { 3684 int i; 3685 int j; 3686 int rc; 3687 3688 if (!fips_enabled && notests) { 3689 printk_once(KERN_INFO "alg: self-tests disabled\n"); 3690 return 0; 3691 } 3692 3693 alg_test_descs_check_order(); 3694 3695 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { 3696 char nalg[CRYPTO_MAX_ALG_NAME]; 3697 3698 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >= 3699 sizeof(nalg)) 3700 return -ENAMETOOLONG; 3701 3702 i = alg_find_test(nalg); 3703 if (i < 0) 3704 goto notest; 3705 3706 if (fips_enabled && !alg_test_descs[i].fips_allowed) 3707 goto non_fips_alg; 3708 3709 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask); 3710 goto test_done; 3711 } 3712 3713 i = alg_find_test(alg); 3714 j = alg_find_test(driver); 3715 if (i < 0 && j < 0) 3716 goto notest; 3717 3718 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) || 3719 (j >= 0 && !alg_test_descs[j].fips_allowed))) 3720 goto non_fips_alg; 3721 3722 rc = 0; 3723 if (i >= 0) 3724 rc |= alg_test_descs[i].test(alg_test_descs + i, driver, 3725 type, mask); 3726 if (j >= 0 && j != i) 3727 rc |= alg_test_descs[j].test(alg_test_descs + j, driver, 3728 type, mask); 3729 3730 test_done: 3731 if (fips_enabled && rc) 3732 panic("%s: %s alg self test failed in fips mode!\n", driver, alg); 3733 3734 if (fips_enabled && !rc) 3735 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg); 3736 3737 return rc; 3738 3739 notest: 3740 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver); 3741 return 0; 3742 non_fips_alg: 3743 return -EINVAL; 3744 } 3745 3746 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */ 3747 3748 EXPORT_SYMBOL_GPL(alg_test); 3749