1 /* 2 * Algorithm testing framework and tests. 3 * 4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au> 5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org> 6 * Copyright (c) 2007 Nokia Siemens Networks 7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au> 8 * 9 * This program is free software; you can redistribute it and/or modify it 10 * under the terms of the GNU General Public License as published by the Free 11 * Software Foundation; either version 2 of the License, or (at your option) 12 * any later version. 13 * 14 */ 15 16 #include <crypto/hash.h> 17 #include <linux/err.h> 18 #include <linux/module.h> 19 #include <linux/scatterlist.h> 20 #include <linux/slab.h> 21 #include <linux/string.h> 22 #include <crypto/rng.h> 23 24 #include "internal.h" 25 #include "testmgr.h" 26 27 /* 28 * Need slab memory for testing (size in number of pages). 29 */ 30 #define XBUFSIZE 8 31 32 /* 33 * Indexes into the xbuf to simulate cross-page access. 34 */ 35 #define IDX1 32 36 #define IDX2 32400 37 #define IDX3 1 38 #define IDX4 8193 39 #define IDX5 22222 40 #define IDX6 17101 41 #define IDX7 27333 42 #define IDX8 3000 43 44 /* 45 * Used by test_cipher() 46 */ 47 #define ENCRYPT 1 48 #define DECRYPT 0 49 50 struct tcrypt_result { 51 struct completion completion; 52 int err; 53 }; 54 55 struct aead_test_suite { 56 struct { 57 struct aead_testvec *vecs; 58 unsigned int count; 59 } enc, dec; 60 }; 61 62 struct cipher_test_suite { 63 struct { 64 struct cipher_testvec *vecs; 65 unsigned int count; 66 } enc, dec; 67 }; 68 69 struct comp_test_suite { 70 struct { 71 struct comp_testvec *vecs; 72 unsigned int count; 73 } comp, decomp; 74 }; 75 76 struct pcomp_test_suite { 77 struct { 78 struct pcomp_testvec *vecs; 79 unsigned int count; 80 } comp, decomp; 81 }; 82 83 struct hash_test_suite { 84 struct hash_testvec *vecs; 85 unsigned int count; 86 }; 87 88 struct cprng_test_suite { 89 struct cprng_testvec *vecs; 90 unsigned int count; 91 }; 92 93 struct alg_test_desc { 94 const char *alg; 95 int (*test)(const struct alg_test_desc *desc, const char *driver, 96 u32 type, u32 mask); 97 int fips_allowed; /* set if alg is allowed in fips mode */ 98 99 union { 100 struct aead_test_suite aead; 101 struct cipher_test_suite cipher; 102 struct comp_test_suite comp; 103 struct pcomp_test_suite pcomp; 104 struct hash_test_suite hash; 105 struct cprng_test_suite cprng; 106 } suite; 107 }; 108 109 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 }; 110 111 static void hexdump(unsigned char *buf, unsigned int len) 112 { 113 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET, 114 16, 1, 115 buf, len, false); 116 } 117 118 static void tcrypt_complete(struct crypto_async_request *req, int err) 119 { 120 struct tcrypt_result *res = req->data; 121 122 if (err == -EINPROGRESS) 123 return; 124 125 res->err = err; 126 complete(&res->completion); 127 } 128 129 static int testmgr_alloc_buf(char *buf[XBUFSIZE]) 130 { 131 int i; 132 133 for (i = 0; i < XBUFSIZE; i++) { 134 buf[i] = (void *)__get_free_page(GFP_KERNEL); 135 if (!buf[i]) 136 goto err_free_buf; 137 } 138 139 return 0; 140 141 err_free_buf: 142 while (i-- > 0) 143 free_page((unsigned long)buf[i]); 144 145 return -ENOMEM; 146 } 147 148 static void testmgr_free_buf(char *buf[XBUFSIZE]) 149 { 150 int i; 151 152 for (i = 0; i < XBUFSIZE; i++) 153 free_page((unsigned long)buf[i]); 154 } 155 156 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template, 157 unsigned int tcount) 158 { 159 const char *algo = 
crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 160 unsigned int i, j, k, temp; 161 struct scatterlist sg[8]; 162 char result[64]; 163 struct ahash_request *req; 164 struct tcrypt_result tresult; 165 void *hash_buff; 166 char *xbuf[XBUFSIZE]; 167 int ret = -ENOMEM; 168 169 if (testmgr_alloc_buf(xbuf)) 170 goto out_nobuf; 171 172 init_completion(&tresult.completion); 173 174 req = ahash_request_alloc(tfm, GFP_KERNEL); 175 if (!req) { 176 printk(KERN_ERR "alg: hash: Failed to allocate request for " 177 "%s\n", algo); 178 goto out_noreq; 179 } 180 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 181 tcrypt_complete, &tresult); 182 183 j = 0; 184 for (i = 0; i < tcount; i++) { 185 if (template[i].np) 186 continue; 187 188 j++; 189 memset(result, 0, 64); 190 191 hash_buff = xbuf[0]; 192 193 memcpy(hash_buff, template[i].plaintext, template[i].psize); 194 sg_init_one(&sg[0], hash_buff, template[i].psize); 195 196 if (template[i].ksize) { 197 crypto_ahash_clear_flags(tfm, ~0); 198 ret = crypto_ahash_setkey(tfm, template[i].key, 199 template[i].ksize); 200 if (ret) { 201 printk(KERN_ERR "alg: hash: setkey failed on " 202 "test %d for %s: ret=%d\n", j, algo, 203 -ret); 204 goto out; 205 } 206 } 207 208 ahash_request_set_crypt(req, sg, result, template[i].psize); 209 ret = crypto_ahash_digest(req); 210 switch (ret) { 211 case 0: 212 break; 213 case -EINPROGRESS: 214 case -EBUSY: 215 ret = wait_for_completion_interruptible( 216 &tresult.completion); 217 if (!ret && !(ret = tresult.err)) { 218 INIT_COMPLETION(tresult.completion); 219 break; 220 } 221 /* fall through */ 222 default: 223 printk(KERN_ERR "alg: hash: digest failed on test %d " 224 "for %s: ret=%d\n", j, algo, -ret); 225 goto out; 226 } 227 228 if (memcmp(result, template[i].digest, 229 crypto_ahash_digestsize(tfm))) { 230 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 231 j, algo); 232 hexdump(result, crypto_ahash_digestsize(tfm)); 233 ret = -EINVAL; 234 goto out; 235 } 236 } 237 238 j = 0; 239 for (i = 0; i < tcount; i++) { 240 if (template[i].np) { 241 j++; 242 memset(result, 0, 64); 243 244 temp = 0; 245 sg_init_table(sg, template[i].np); 246 ret = -EINVAL; 247 for (k = 0; k < template[i].np; k++) { 248 if (WARN_ON(offset_in_page(IDX[k]) + 249 template[i].tap[k] > PAGE_SIZE)) 250 goto out; 251 sg_set_buf(&sg[k], 252 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + 253 offset_in_page(IDX[k]), 254 template[i].plaintext + temp, 255 template[i].tap[k]), 256 template[i].tap[k]); 257 temp += template[i].tap[k]; 258 } 259 260 if (template[i].ksize) { 261 crypto_ahash_clear_flags(tfm, ~0); 262 ret = crypto_ahash_setkey(tfm, template[i].key, 263 template[i].ksize); 264 265 if (ret) { 266 printk(KERN_ERR "alg: hash: setkey " 267 "failed on chunking test %d " 268 "for %s: ret=%d\n", j, algo, 269 -ret); 270 goto out; 271 } 272 } 273 274 ahash_request_set_crypt(req, sg, result, 275 template[i].psize); 276 ret = crypto_ahash_digest(req); 277 switch (ret) { 278 case 0: 279 break; 280 case -EINPROGRESS: 281 case -EBUSY: 282 ret = wait_for_completion_interruptible( 283 &tresult.completion); 284 if (!ret && !(ret = tresult.err)) { 285 INIT_COMPLETION(tresult.completion); 286 break; 287 } 288 /* fall through */ 289 default: 290 printk(KERN_ERR "alg: hash: digest failed " 291 "on chunking test %d for %s: " 292 "ret=%d\n", j, algo, -ret); 293 goto out; 294 } 295 296 if (memcmp(result, template[i].digest, 297 crypto_ahash_digestsize(tfm))) { 298 printk(KERN_ERR "alg: hash: Chunking test %d " 299 "failed for %s\n", j, algo); 300 hexdump(result, 
crypto_ahash_digestsize(tfm)); 301 ret = -EINVAL; 302 goto out; 303 } 304 } 305 } 306 307 ret = 0; 308 309 out: 310 ahash_request_free(req); 311 out_noreq: 312 testmgr_free_buf(xbuf); 313 out_nobuf: 314 return ret; 315 } 316 317 static int test_aead(struct crypto_aead *tfm, int enc, 318 struct aead_testvec *template, unsigned int tcount) 319 { 320 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm)); 321 unsigned int i, j, k, n, temp; 322 int ret = -ENOMEM; 323 char *q; 324 char *key; 325 struct aead_request *req; 326 struct scatterlist sg[8]; 327 struct scatterlist asg[8]; 328 const char *e; 329 struct tcrypt_result result; 330 unsigned int authsize; 331 void *input; 332 void *assoc; 333 char iv[MAX_IVLEN]; 334 char *xbuf[XBUFSIZE]; 335 char *axbuf[XBUFSIZE]; 336 337 if (testmgr_alloc_buf(xbuf)) 338 goto out_noxbuf; 339 if (testmgr_alloc_buf(axbuf)) 340 goto out_noaxbuf; 341 342 if (enc == ENCRYPT) 343 e = "encryption"; 344 else 345 e = "decryption"; 346 347 init_completion(&result.completion); 348 349 req = aead_request_alloc(tfm, GFP_KERNEL); 350 if (!req) { 351 printk(KERN_ERR "alg: aead: Failed to allocate request for " 352 "%s\n", algo); 353 goto out; 354 } 355 356 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 357 tcrypt_complete, &result); 358 359 for (i = 0, j = 0; i < tcount; i++) { 360 if (!template[i].np) { 361 j++; 362 363 /* some templates have no input data but they will 364 * touch input 365 */ 366 input = xbuf[0]; 367 assoc = axbuf[0]; 368 369 ret = -EINVAL; 370 if (WARN_ON(template[i].ilen > PAGE_SIZE || 371 template[i].alen > PAGE_SIZE)) 372 goto out; 373 374 memcpy(input, template[i].input, template[i].ilen); 375 memcpy(assoc, template[i].assoc, template[i].alen); 376 if (template[i].iv) 377 memcpy(iv, template[i].iv, MAX_IVLEN); 378 else 379 memset(iv, 0, MAX_IVLEN); 380 381 crypto_aead_clear_flags(tfm, ~0); 382 if (template[i].wk) 383 crypto_aead_set_flags( 384 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 385 386 key = template[i].key; 387 388 ret = crypto_aead_setkey(tfm, key, 389 template[i].klen); 390 if (!ret == template[i].fail) { 391 printk(KERN_ERR "alg: aead: setkey failed on " 392 "test %d for %s: flags=%x\n", j, algo, 393 crypto_aead_get_flags(tfm)); 394 goto out; 395 } else if (ret) 396 continue; 397 398 authsize = abs(template[i].rlen - template[i].ilen); 399 ret = crypto_aead_setauthsize(tfm, authsize); 400 if (ret) { 401 printk(KERN_ERR "alg: aead: Failed to set " 402 "authsize to %u on test %d for %s\n", 403 authsize, j, algo); 404 goto out; 405 } 406 407 sg_init_one(&sg[0], input, 408 template[i].ilen + (enc ? authsize : 0)); 409 410 sg_init_one(&asg[0], assoc, template[i].alen); 411 412 aead_request_set_crypt(req, sg, sg, 413 template[i].ilen, iv); 414 415 aead_request_set_assoc(req, asg, template[i].alen); 416 417 ret = enc ?
418 crypto_aead_encrypt(req) : 419 crypto_aead_decrypt(req); 420 421 switch (ret) { 422 case 0: 423 if (template[i].novrfy) { 424 /* verification was supposed to fail */ 425 printk(KERN_ERR "alg: aead: %s failed " 426 "on test %d for %s: ret was 0, " 427 "expected -EBADMSG\n", 428 e, j, algo); 429 /* so really, we got a bad message */ 430 ret = -EBADMSG; 431 goto out; 432 } 433 break; 434 case -EINPROGRESS: 435 case -EBUSY: 436 ret = wait_for_completion_interruptible( 437 &result.completion); 438 if (!ret && !(ret = result.err)) { 439 INIT_COMPLETION(result.completion); 440 break; 441 } 442 case -EBADMSG: 443 if (template[i].novrfy) 444 /* verification failure was expected */ 445 continue; 446 /* fall through */ 447 default: 448 printk(KERN_ERR "alg: aead: %s failed on test " 449 "%d for %s: ret=%d\n", e, j, algo, -ret); 450 goto out; 451 } 452 453 q = input; 454 if (memcmp(q, template[i].result, template[i].rlen)) { 455 printk(KERN_ERR "alg: aead: Test %d failed on " 456 "%s for %s\n", j, e, algo); 457 hexdump(q, template[i].rlen); 458 ret = -EINVAL; 459 goto out; 460 } 461 } 462 } 463 464 for (i = 0, j = 0; i < tcount; i++) { 465 if (template[i].np) { 466 j++; 467 468 if (template[i].iv) 469 memcpy(iv, template[i].iv, MAX_IVLEN); 470 else 471 memset(iv, 0, MAX_IVLEN); 472 473 crypto_aead_clear_flags(tfm, ~0); 474 if (template[i].wk) 475 crypto_aead_set_flags( 476 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 477 key = template[i].key; 478 479 ret = crypto_aead_setkey(tfm, key, template[i].klen); 480 if (!ret == template[i].fail) { 481 printk(KERN_ERR "alg: aead: setkey failed on " 482 "chunk test %d for %s: flags=%x\n", j, 483 algo, crypto_aead_get_flags(tfm)); 484 goto out; 485 } else if (ret) 486 continue; 487 488 authsize = abs(template[i].rlen - template[i].ilen); 489 490 ret = -EINVAL; 491 sg_init_table(sg, template[i].np); 492 for (k = 0, temp = 0; k < template[i].np; k++) { 493 if (WARN_ON(offset_in_page(IDX[k]) + 494 template[i].tap[k] > PAGE_SIZE)) 495 goto out; 496 497 q = xbuf[IDX[k] >> PAGE_SHIFT] + 498 offset_in_page(IDX[k]); 499 500 memcpy(q, template[i].input + temp, 501 template[i].tap[k]); 502 503 n = template[i].tap[k]; 504 if (k == template[i].np - 1 && enc) 505 n += authsize; 506 if (offset_in_page(q) + n < PAGE_SIZE) 507 q[n] = 0; 508 509 sg_set_buf(&sg[k], q, template[i].tap[k]); 510 temp += template[i].tap[k]; 511 } 512 513 ret = crypto_aead_setauthsize(tfm, authsize); 514 if (ret) { 515 printk(KERN_ERR "alg: aead: Failed to set " 516 "authsize to %u on chunk test %d for " 517 "%s\n", authsize, j, algo); 518 goto out; 519 } 520 521 if (enc) { 522 if (WARN_ON(sg[k - 1].offset + 523 sg[k - 1].length + authsize > 524 PAGE_SIZE)) { 525 ret = -EINVAL; 526 goto out; 527 } 528 529 sg[k - 1].length += authsize; 530 } 531 532 sg_init_table(asg, template[i].anp); 533 ret = -EINVAL; 534 for (k = 0, temp = 0; k < template[i].anp; k++) { 535 if (WARN_ON(offset_in_page(IDX[k]) + 536 template[i].atap[k] > PAGE_SIZE)) 537 goto out; 538 sg_set_buf(&asg[k], 539 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] + 540 offset_in_page(IDX[k]), 541 template[i].assoc + temp, 542 template[i].atap[k]), 543 template[i].atap[k]); 544 temp += template[i].atap[k]; 545 } 546 547 aead_request_set_crypt(req, sg, sg, 548 template[i].ilen, 549 iv); 550 551 aead_request_set_assoc(req, asg, template[i].alen); 552 553 ret = enc ? 
554 crypto_aead_encrypt(req) : 555 crypto_aead_decrypt(req); 556 557 switch (ret) { 558 case 0: 559 if (template[i].novrfy) { 560 /* verification was supposed to fail */ 561 printk(KERN_ERR "alg: aead: %s failed " 562 "on chunk test %d for %s: ret " 563 "was 0, expected -EBADMSG\n", 564 e, j, algo); 565 /* so really, we got a bad message */ 566 ret = -EBADMSG; 567 goto out; 568 } 569 break; 570 case -EINPROGRESS: 571 case -EBUSY: 572 ret = wait_for_completion_interruptible( 573 &result.completion); 574 if (!ret && !(ret = result.err)) { 575 INIT_COMPLETION(result.completion); 576 break; 577 } 578 case -EBADMSG: 579 if (template[i].novrfy) 580 /* verification failure was expected */ 581 continue; 582 /* fall through */ 583 default: 584 printk(KERN_ERR "alg: aead: %s failed on " 585 "chunk test %d for %s: ret=%d\n", e, j, 586 algo, -ret); 587 goto out; 588 } 589 590 ret = -EINVAL; 591 for (k = 0, temp = 0; k < template[i].np; k++) { 592 q = xbuf[IDX[k] >> PAGE_SHIFT] + 593 offset_in_page(IDX[k]); 594 595 n = template[i].tap[k]; 596 if (k == template[i].np - 1) 597 n += enc ? authsize : -authsize; 598 599 if (memcmp(q, template[i].result + temp, n)) { 600 printk(KERN_ERR "alg: aead: Chunk " 601 "test %d failed on %s at page " 602 "%u for %s\n", j, e, k, algo); 603 hexdump(q, n); 604 goto out; 605 } 606 607 q += n; 608 if (k == template[i].np - 1 && !enc) { 609 if (memcmp(q, template[i].input + 610 temp + n, authsize)) 611 n = authsize; 612 else 613 n = 0; 614 } else { 615 for (n = 0; offset_in_page(q + n) && 616 q[n]; n++) 617 ; 618 } 619 if (n) { 620 printk(KERN_ERR "alg: aead: Result " 621 "buffer corruption in chunk " 622 "test %d on %s at page %u for " 623 "%s: %u bytes:\n", j, e, k, 624 algo, n); 625 hexdump(q, n); 626 goto out; 627 } 628 629 temp += template[i].tap[k]; 630 } 631 } 632 } 633 634 ret = 0; 635 636 out: 637 aead_request_free(req); 638 testmgr_free_buf(axbuf); 639 out_noaxbuf: 640 testmgr_free_buf(xbuf); 641 out_noxbuf: 642 return ret; 643 } 644 645 static int test_cipher(struct crypto_cipher *tfm, int enc, 646 struct cipher_testvec *template, unsigned int tcount) 647 { 648 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm)); 649 unsigned int i, j, k; 650 char *q; 651 const char *e; 652 void *data; 653 char *xbuf[XBUFSIZE]; 654 int ret = -ENOMEM; 655 656 if (testmgr_alloc_buf(xbuf)) 657 goto out_nobuf; 658 659 if (enc == ENCRYPT) 660 e = "encryption"; 661 else 662 e = "decryption"; 663 664 j = 0; 665 for (i = 0; i < tcount; i++) { 666 if (template[i].np) 667 continue; 668 669 j++; 670 671 ret = -EINVAL; 672 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 673 goto out; 674 675 data = xbuf[0]; 676 memcpy(data, template[i].input, template[i].ilen); 677 678 crypto_cipher_clear_flags(tfm, ~0); 679 if (template[i].wk) 680 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY); 681 682 ret = crypto_cipher_setkey(tfm, template[i].key, 683 template[i].klen); 684 if (!ret == template[i].fail) { 685 printk(KERN_ERR "alg: cipher: setkey failed " 686 "on test %d for %s: flags=%x\n", j, 687 algo, crypto_cipher_get_flags(tfm)); 688 goto out; 689 } else if (ret) 690 continue; 691 692 for (k = 0; k < template[i].ilen; 693 k += crypto_cipher_blocksize(tfm)) { 694 if (enc) 695 crypto_cipher_encrypt_one(tfm, data + k, 696 data + k); 697 else 698 crypto_cipher_decrypt_one(tfm, data + k, 699 data + k); 700 } 701 702 q = data; 703 if (memcmp(q, template[i].result, template[i].rlen)) { 704 printk(KERN_ERR "alg: cipher: Test %d failed " 705 "on %s for %s\n", j, e, algo); 706 hexdump(q, 
template[i].rlen); 707 ret = -EINVAL; 708 goto out; 709 } 710 } 711 712 ret = 0; 713 714 out: 715 testmgr_free_buf(xbuf); 716 out_nobuf: 717 return ret; 718 } 719 720 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc, 721 struct cipher_testvec *template, unsigned int tcount) 722 { 723 const char *algo = 724 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm)); 725 unsigned int i, j, k, n, temp; 726 char *q; 727 struct ablkcipher_request *req; 728 struct scatterlist sg[8]; 729 const char *e; 730 struct tcrypt_result result; 731 void *data; 732 char iv[MAX_IVLEN]; 733 char *xbuf[XBUFSIZE]; 734 int ret = -ENOMEM; 735 736 if (testmgr_alloc_buf(xbuf)) 737 goto out_nobuf; 738 739 if (enc == ENCRYPT) 740 e = "encryption"; 741 else 742 e = "decryption"; 743 744 init_completion(&result.completion); 745 746 req = ablkcipher_request_alloc(tfm, GFP_KERNEL); 747 if (!req) { 748 printk(KERN_ERR "alg: skcipher: Failed to allocate request " 749 "for %s\n", algo); 750 goto out; 751 } 752 753 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 754 tcrypt_complete, &result); 755 756 j = 0; 757 for (i = 0; i < tcount; i++) { 758 if (template[i].iv) 759 memcpy(iv, template[i].iv, MAX_IVLEN); 760 else 761 memset(iv, 0, MAX_IVLEN); 762 763 if (!(template[i].np)) { 764 j++; 765 766 ret = -EINVAL; 767 if (WARN_ON(template[i].ilen > PAGE_SIZE)) 768 goto out; 769 770 data = xbuf[0]; 771 memcpy(data, template[i].input, template[i].ilen); 772 773 crypto_ablkcipher_clear_flags(tfm, ~0); 774 if (template[i].wk) 775 crypto_ablkcipher_set_flags( 776 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 777 778 ret = crypto_ablkcipher_setkey(tfm, template[i].key, 779 template[i].klen); 780 if (!ret == template[i].fail) { 781 printk(KERN_ERR "alg: skcipher: setkey failed " 782 "on test %d for %s: flags=%x\n", j, 783 algo, crypto_ablkcipher_get_flags(tfm)); 784 goto out; 785 } else if (ret) 786 continue; 787 788 sg_init_one(&sg[0], data, template[i].ilen); 789 790 ablkcipher_request_set_crypt(req, sg, sg, 791 template[i].ilen, iv); 792 ret = enc ? 
793 crypto_ablkcipher_encrypt(req) : 794 crypto_ablkcipher_decrypt(req); 795 796 switch (ret) { 797 case 0: 798 break; 799 case -EINPROGRESS: 800 case -EBUSY: 801 ret = wait_for_completion_interruptible( 802 &result.completion); 803 if (!ret && !((ret = result.err))) { 804 INIT_COMPLETION(result.completion); 805 break; 806 } 807 /* fall through */ 808 default: 809 printk(KERN_ERR "alg: skcipher: %s failed on " 810 "test %d for %s: ret=%d\n", e, j, algo, 811 -ret); 812 goto out; 813 } 814 815 q = data; 816 if (memcmp(q, template[i].result, template[i].rlen)) { 817 printk(KERN_ERR "alg: skcipher: Test %d " 818 "failed on %s for %s\n", j, e, algo); 819 hexdump(q, template[i].rlen); 820 ret = -EINVAL; 821 goto out; 822 } 823 } 824 } 825 826 j = 0; 827 for (i = 0; i < tcount; i++) { 828 829 if (template[i].iv) 830 memcpy(iv, template[i].iv, MAX_IVLEN); 831 else 832 memset(iv, 0, MAX_IVLEN); 833 834 if (template[i].np) { 835 j++; 836 837 crypto_ablkcipher_clear_flags(tfm, ~0); 838 if (template[i].wk) 839 crypto_ablkcipher_set_flags( 840 tfm, CRYPTO_TFM_REQ_WEAK_KEY); 841 842 ret = crypto_ablkcipher_setkey(tfm, template[i].key, 843 template[i].klen); 844 if (!ret == template[i].fail) { 845 printk(KERN_ERR "alg: skcipher: setkey failed " 846 "on chunk test %d for %s: flags=%x\n", 847 j, algo, 848 crypto_ablkcipher_get_flags(tfm)); 849 goto out; 850 } else if (ret) 851 continue; 852 853 temp = 0; 854 ret = -EINVAL; 855 sg_init_table(sg, template[i].np); 856 for (k = 0; k < template[i].np; k++) { 857 if (WARN_ON(offset_in_page(IDX[k]) + 858 template[i].tap[k] > PAGE_SIZE)) 859 goto out; 860 861 q = xbuf[IDX[k] >> PAGE_SHIFT] + 862 offset_in_page(IDX[k]); 863 864 memcpy(q, template[i].input + temp, 865 template[i].tap[k]); 866 867 if (offset_in_page(q) + template[i].tap[k] < 868 PAGE_SIZE) 869 q[template[i].tap[k]] = 0; 870 871 sg_set_buf(&sg[k], q, template[i].tap[k]); 872 873 temp += template[i].tap[k]; 874 } 875 876 ablkcipher_request_set_crypt(req, sg, sg, 877 template[i].ilen, iv); 878 879 ret = enc ? 
880 crypto_ablkcipher_encrypt(req) : 881 crypto_ablkcipher_decrypt(req); 882 883 switch (ret) { 884 case 0: 885 break; 886 case -EINPROGRESS: 887 case -EBUSY: 888 ret = wait_for_completion_interruptible( 889 &result.completion); 890 if (!ret && !((ret = result.err))) { 891 INIT_COMPLETION(result.completion); 892 break; 893 } 894 /* fall through */ 895 default: 896 printk(KERN_ERR "alg: skcipher: %s failed on " 897 "chunk test %d for %s: ret=%d\n", e, j, 898 algo, -ret); 899 goto out; 900 } 901 902 temp = 0; 903 ret = -EINVAL; 904 for (k = 0; k < template[i].np; k++) { 905 q = xbuf[IDX[k] >> PAGE_SHIFT] + 906 offset_in_page(IDX[k]); 907 908 if (memcmp(q, template[i].result + temp, 909 template[i].tap[k])) { 910 printk(KERN_ERR "alg: skcipher: Chunk " 911 "test %d failed on %s at page " 912 "%u for %s\n", j, e, k, algo); 913 hexdump(q, template[i].tap[k]); 914 goto out; 915 } 916 917 q += template[i].tap[k]; 918 for (n = 0; offset_in_page(q + n) && q[n]; n++) 919 ; 920 if (n) { 921 printk(KERN_ERR "alg: skcipher: " 922 "Result buffer corruption in " 923 "chunk test %d on %s at page " 924 "%u for %s: %u bytes:\n", j, e, 925 k, algo, n); 926 hexdump(q, n); 927 goto out; 928 } 929 temp += template[i].tap[k]; 930 } 931 } 932 } 933 934 ret = 0; 935 936 out: 937 ablkcipher_request_free(req); 938 testmgr_free_buf(xbuf); 939 out_nobuf: 940 return ret; 941 } 942 943 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate, 944 struct comp_testvec *dtemplate, int ctcount, int dtcount) 945 { 946 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm)); 947 unsigned int i; 948 char result[COMP_BUF_SIZE]; 949 int ret; 950 951 for (i = 0; i < ctcount; i++) { 952 int ilen; 953 unsigned int dlen = COMP_BUF_SIZE; 954 955 memset(result, 0, sizeof (result)); 956 957 ilen = ctemplate[i].inlen; 958 ret = crypto_comp_compress(tfm, ctemplate[i].input, 959 ilen, result, &dlen); 960 if (ret) { 961 printk(KERN_ERR "alg: comp: compression failed " 962 "on test %d for %s: ret=%d\n", i + 1, algo, 963 -ret); 964 goto out; 965 } 966 967 if (dlen != ctemplate[i].outlen) { 968 printk(KERN_ERR "alg: comp: Compression test %d " 969 "failed for %s: output len = %d\n", i + 1, algo, 970 dlen); 971 ret = -EINVAL; 972 goto out; 973 } 974 975 if (memcmp(result, ctemplate[i].output, dlen)) { 976 printk(KERN_ERR "alg: comp: Compression test %d " 977 "failed for %s\n", i + 1, algo); 978 hexdump(result, dlen); 979 ret = -EINVAL; 980 goto out; 981 } 982 } 983 984 for (i = 0; i < dtcount; i++) { 985 int ilen; 986 unsigned int dlen = COMP_BUF_SIZE; 987 988 memset(result, 0, sizeof (result)); 989 990 ilen = dtemplate[i].inlen; 991 ret = crypto_comp_decompress(tfm, dtemplate[i].input, 992 ilen, result, &dlen); 993 if (ret) { 994 printk(KERN_ERR "alg: comp: decompression failed " 995 "on test %d for %s: ret=%d\n", i + 1, algo, 996 -ret); 997 goto out; 998 } 999 1000 if (dlen != dtemplate[i].outlen) { 1001 printk(KERN_ERR "alg: comp: Decompression test %d " 1002 "failed for %s: output len = %d\n", i + 1, algo, 1003 dlen); 1004 ret = -EINVAL; 1005 goto out; 1006 } 1007 1008 if (memcmp(result, dtemplate[i].output, dlen)) { 1009 printk(KERN_ERR "alg: comp: Decompression test %d " 1010 "failed for %s\n", i + 1, algo); 1011 hexdump(result, dlen); 1012 ret = -EINVAL; 1013 goto out; 1014 } 1015 } 1016 1017 ret = 0; 1018 1019 out: 1020 return ret; 1021 } 1022 1023 static int test_pcomp(struct crypto_pcomp *tfm, 1024 struct pcomp_testvec *ctemplate, 1025 struct pcomp_testvec *dtemplate, int ctcount, 1026 int dtcount) 
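/*
 * Note: each compression/decompression vector is deliberately fed to the
 * partial (streaming) interface piecemeal: half of the input and half of
 * the output space are offered first, and the remainder is only added
 * before the second update and the final call, so the update path is
 * exercised with partial buffers rather than one-shot data.
 */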
1027 { 1028 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm)); 1029 unsigned int i; 1030 char result[COMP_BUF_SIZE]; 1031 int res; 1032 1033 for (i = 0; i < ctcount; i++) { 1034 struct comp_request req; 1035 unsigned int produced = 0; 1036 1037 res = crypto_compress_setup(tfm, ctemplate[i].params, 1038 ctemplate[i].paramsize); 1039 if (res) { 1040 pr_err("alg: pcomp: compression setup failed on test " 1041 "%d for %s: error=%d\n", i + 1, algo, res); 1042 return res; 1043 } 1044 1045 res = crypto_compress_init(tfm); 1046 if (res) { 1047 pr_err("alg: pcomp: compression init failed on test " 1048 "%d for %s: error=%d\n", i + 1, algo, res); 1049 return res; 1050 } 1051 1052 memset(result, 0, sizeof(result)); 1053 1054 req.next_in = ctemplate[i].input; 1055 req.avail_in = ctemplate[i].inlen / 2; 1056 req.next_out = result; 1057 req.avail_out = ctemplate[i].outlen / 2; 1058 1059 res = crypto_compress_update(tfm, &req); 1060 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1061 pr_err("alg: pcomp: compression update failed on test " 1062 "%d for %s: error=%d\n", i + 1, algo, res); 1063 return res; 1064 } 1065 if (res > 0) 1066 produced += res; 1067 1068 /* Add remaining input data */ 1069 req.avail_in += (ctemplate[i].inlen + 1) / 2; 1070 1071 res = crypto_compress_update(tfm, &req); 1072 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1073 pr_err("alg: pcomp: compression update failed on test " 1074 "%d for %s: error=%d\n", i + 1, algo, res); 1075 return res; 1076 } 1077 if (res > 0) 1078 produced += res; 1079 1080 /* Provide remaining output space */ 1081 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2; 1082 1083 res = crypto_compress_final(tfm, &req); 1084 if (res < 0) { 1085 pr_err("alg: pcomp: compression final failed on test " 1086 "%d for %s: error=%d\n", i + 1, algo, res); 1087 return res; 1088 } 1089 produced += res; 1090 1091 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) { 1092 pr_err("alg: comp: Compression test %d failed for %s: " 1093 "output len = %d (expected %d)\n", i + 1, algo, 1094 COMP_BUF_SIZE - req.avail_out, 1095 ctemplate[i].outlen); 1096 return -EINVAL; 1097 } 1098 1099 if (produced != ctemplate[i].outlen) { 1100 pr_err("alg: comp: Compression test %d failed for %s: " 1101 "returned len = %u (expected %d)\n", i + 1, 1102 algo, produced, ctemplate[i].outlen); 1103 return -EINVAL; 1104 } 1105 1106 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) { 1107 pr_err("alg: pcomp: Compression test %d failed for " 1108 "%s\n", i + 1, algo); 1109 hexdump(result, ctemplate[i].outlen); 1110 return -EINVAL; 1111 } 1112 } 1113 1114 for (i = 0; i < dtcount; i++) { 1115 struct comp_request req; 1116 unsigned int produced = 0; 1117 1118 res = crypto_decompress_setup(tfm, dtemplate[i].params, 1119 dtemplate[i].paramsize); 1120 if (res) { 1121 pr_err("alg: pcomp: decompression setup failed on " 1122 "test %d for %s: error=%d\n", i + 1, algo, res); 1123 return res; 1124 } 1125 1126 res = crypto_decompress_init(tfm); 1127 if (res) { 1128 pr_err("alg: pcomp: decompression init failed on test " 1129 "%d for %s: error=%d\n", i + 1, algo, res); 1130 return res; 1131 } 1132 1133 memset(result, 0, sizeof(result)); 1134 1135 req.next_in = dtemplate[i].input; 1136 req.avail_in = dtemplate[i].inlen / 2; 1137 req.next_out = result; 1138 req.avail_out = dtemplate[i].outlen / 2; 1139 1140 res = crypto_decompress_update(tfm, &req); 1141 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1142 pr_err("alg: pcomp: decompression update failed on " 1143 
"test %d for %s: error=%d\n", i + 1, algo, res); 1144 return res; 1145 } 1146 if (res > 0) 1147 produced += res; 1148 1149 /* Add remaining input data */ 1150 req.avail_in += (dtemplate[i].inlen + 1) / 2; 1151 1152 res = crypto_decompress_update(tfm, &req); 1153 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1154 pr_err("alg: pcomp: decompression update failed on " 1155 "test %d for %s: error=%d\n", i + 1, algo, res); 1156 return res; 1157 } 1158 if (res > 0) 1159 produced += res; 1160 1161 /* Provide remaining output space */ 1162 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2; 1163 1164 res = crypto_decompress_final(tfm, &req); 1165 if (res < 0 && (res != -EAGAIN || req.avail_in)) { 1166 pr_err("alg: pcomp: decompression final failed on " 1167 "test %d for %s: error=%d\n", i + 1, algo, res); 1168 return res; 1169 } 1170 if (res > 0) 1171 produced += res; 1172 1173 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) { 1174 pr_err("alg: comp: Decompression test %d failed for " 1175 "%s: output len = %d (expected %d)\n", i + 1, 1176 algo, COMP_BUF_SIZE - req.avail_out, 1177 dtemplate[i].outlen); 1178 return -EINVAL; 1179 } 1180 1181 if (produced != dtemplate[i].outlen) { 1182 pr_err("alg: comp: Decompression test %d failed for " 1183 "%s: returned len = %u (expected %d)\n", i + 1, 1184 algo, produced, dtemplate[i].outlen); 1185 return -EINVAL; 1186 } 1187 1188 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) { 1189 pr_err("alg: pcomp: Decompression test %d failed for " 1190 "%s\n", i + 1, algo); 1191 hexdump(result, dtemplate[i].outlen); 1192 return -EINVAL; 1193 } 1194 } 1195 1196 return 0; 1197 } 1198 1199 1200 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template, 1201 unsigned int tcount) 1202 { 1203 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 1204 int err = 0, i, j, seedsize; 1205 u8 *seed; 1206 char result[32]; 1207 1208 seedsize = crypto_rng_seedsize(tfm); 1209 1210 seed = kmalloc(seedsize, GFP_KERNEL); 1211 if (!seed) { 1212 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 1213 "for %s\n", algo); 1214 return -ENOMEM; 1215 } 1216 1217 for (i = 0; i < tcount; i++) { 1218 memset(result, 0, 32); 1219 1220 memcpy(seed, template[i].v, template[i].vlen); 1221 memcpy(seed + template[i].vlen, template[i].key, 1222 template[i].klen); 1223 memcpy(seed + template[i].vlen + template[i].klen, 1224 template[i].dt, template[i].dtlen); 1225 1226 err = crypto_rng_reset(tfm, seed, seedsize); 1227 if (err) { 1228 printk(KERN_ERR "alg: cprng: Failed to reset rng " 1229 "for %s\n", algo); 1230 goto out; 1231 } 1232 1233 for (j = 0; j < template[i].loops; j++) { 1234 err = crypto_rng_get_bytes(tfm, result, 1235 template[i].rlen); 1236 if (err != template[i].rlen) { 1237 printk(KERN_ERR "alg: cprng: Failed to obtain " 1238 "the correct amount of random data for " 1239 "%s (requested %d, got %d)\n", algo, 1240 template[i].rlen, err); 1241 goto out; 1242 } 1243 } 1244 1245 err = memcmp(result, template[i].result, 1246 template[i].rlen); 1247 if (err) { 1248 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 1249 i, algo); 1250 hexdump(result, template[i].rlen); 1251 err = -EINVAL; 1252 goto out; 1253 } 1254 } 1255 1256 out: 1257 kfree(seed); 1258 return err; 1259 } 1260 1261 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 1262 u32 type, u32 mask) 1263 { 1264 struct crypto_aead *tfm; 1265 int err = 0; 1266 1267 tfm = crypto_alloc_aead(driver, type, mask); 1268 if (IS_ERR(tfm)) { 1269 
printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 1270 "%ld\n", driver, PTR_ERR(tfm)); 1271 return PTR_ERR(tfm); 1272 } 1273 1274 if (desc->suite.aead.enc.vecs) { 1275 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs, 1276 desc->suite.aead.enc.count); 1277 if (err) 1278 goto out; 1279 } 1280 1281 if (!err && desc->suite.aead.dec.vecs) 1282 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs, 1283 desc->suite.aead.dec.count); 1284 1285 out: 1286 crypto_free_aead(tfm); 1287 return err; 1288 } 1289 1290 static int alg_test_cipher(const struct alg_test_desc *desc, 1291 const char *driver, u32 type, u32 mask) 1292 { 1293 struct crypto_cipher *tfm; 1294 int err = 0; 1295 1296 tfm = crypto_alloc_cipher(driver, type, mask); 1297 if (IS_ERR(tfm)) { 1298 printk(KERN_ERR "alg: cipher: Failed to load transform for " 1299 "%s: %ld\n", driver, PTR_ERR(tfm)); 1300 return PTR_ERR(tfm); 1301 } 1302 1303 if (desc->suite.cipher.enc.vecs) { 1304 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1305 desc->suite.cipher.enc.count); 1306 if (err) 1307 goto out; 1308 } 1309 1310 if (desc->suite.cipher.dec.vecs) 1311 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1312 desc->suite.cipher.dec.count); 1313 1314 out: 1315 crypto_free_cipher(tfm); 1316 return err; 1317 } 1318 1319 static int alg_test_skcipher(const struct alg_test_desc *desc, 1320 const char *driver, u32 type, u32 mask) 1321 { 1322 struct crypto_ablkcipher *tfm; 1323 int err = 0; 1324 1325 tfm = crypto_alloc_ablkcipher(driver, type, mask); 1326 if (IS_ERR(tfm)) { 1327 printk(KERN_ERR "alg: skcipher: Failed to load transform for " 1328 "%s: %ld\n", driver, PTR_ERR(tfm)); 1329 return PTR_ERR(tfm); 1330 } 1331 1332 if (desc->suite.cipher.enc.vecs) { 1333 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs, 1334 desc->suite.cipher.enc.count); 1335 if (err) 1336 goto out; 1337 } 1338 1339 if (desc->suite.cipher.dec.vecs) 1340 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs, 1341 desc->suite.cipher.dec.count); 1342 1343 out: 1344 crypto_free_ablkcipher(tfm); 1345 return err; 1346 } 1347 1348 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 1349 u32 type, u32 mask) 1350 { 1351 struct crypto_comp *tfm; 1352 int err; 1353 1354 tfm = crypto_alloc_comp(driver, type, mask); 1355 if (IS_ERR(tfm)) { 1356 printk(KERN_ERR "alg: comp: Failed to load transform for %s: " 1357 "%ld\n", driver, PTR_ERR(tfm)); 1358 return PTR_ERR(tfm); 1359 } 1360 1361 err = test_comp(tfm, desc->suite.comp.comp.vecs, 1362 desc->suite.comp.decomp.vecs, 1363 desc->suite.comp.comp.count, 1364 desc->suite.comp.decomp.count); 1365 1366 crypto_free_comp(tfm); 1367 return err; 1368 } 1369 1370 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver, 1371 u32 type, u32 mask) 1372 { 1373 struct crypto_pcomp *tfm; 1374 int err; 1375 1376 tfm = crypto_alloc_pcomp(driver, type, mask); 1377 if (IS_ERR(tfm)) { 1378 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n", 1379 driver, PTR_ERR(tfm)); 1380 return PTR_ERR(tfm); 1381 } 1382 1383 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs, 1384 desc->suite.pcomp.decomp.vecs, 1385 desc->suite.pcomp.comp.count, 1386 desc->suite.pcomp.decomp.count); 1387 1388 crypto_free_pcomp(tfm); 1389 return err; 1390 } 1391 1392 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver, 1393 u32 type, u32 mask) 1394 { 1395 struct crypto_ahash *tfm; 1396 int err; 1397 1398 tfm = crypto_alloc_ahash(driver, type, mask); 1399 
if (IS_ERR(tfm)) { 1400 printk(KERN_ERR "alg: hash: Failed to load transform for %s: " 1401 "%ld\n", driver, PTR_ERR(tfm)); 1402 return PTR_ERR(tfm); 1403 } 1404 1405 err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count); 1406 1407 crypto_free_ahash(tfm); 1408 return err; 1409 } 1410 1411 static int alg_test_crc32c(const struct alg_test_desc *desc, 1412 const char *driver, u32 type, u32 mask) 1413 { 1414 struct crypto_shash *tfm; 1415 u32 val; 1416 int err; 1417 1418 err = alg_test_hash(desc, driver, type, mask); 1419 if (err) 1420 goto out; 1421 1422 tfm = crypto_alloc_shash(driver, type, mask); 1423 if (IS_ERR(tfm)) { 1424 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 1425 "%ld\n", driver, PTR_ERR(tfm)); 1426 err = PTR_ERR(tfm); 1427 goto out; 1428 } 1429 1430 do { 1431 struct { 1432 struct shash_desc shash; 1433 char ctx[crypto_shash_descsize(tfm)]; 1434 } sdesc; 1435 1436 sdesc.shash.tfm = tfm; 1437 sdesc.shash.flags = 0; 1438 1439 *(u32 *)sdesc.ctx = le32_to_cpu(420553207); 1440 err = crypto_shash_final(&sdesc.shash, (u8 *)&val); 1441 if (err) { 1442 printk(KERN_ERR "alg: crc32c: Operation failed for " 1443 "%s: %d\n", driver, err); 1444 break; 1445 } 1446 1447 if (val != ~420553207) { 1448 printk(KERN_ERR "alg: crc32c: Test failed for %s: " 1449 "%d\n", driver, val); 1450 err = -EINVAL; 1451 } 1452 } while (0); 1453 1454 crypto_free_shash(tfm); 1455 1456 out: 1457 return err; 1458 } 1459 1460 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 1461 u32 type, u32 mask) 1462 { 1463 struct crypto_rng *rng; 1464 int err; 1465 1466 rng = crypto_alloc_rng(driver, type, mask); 1467 if (IS_ERR(rng)) { 1468 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 1469 "%ld\n", driver, PTR_ERR(rng)); 1470 return PTR_ERR(rng); 1471 } 1472 1473 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 1474 1475 crypto_free_rng(rng); 1476 1477 return err; 1478 } 1479 1480 /* Please keep this list sorted by algorithm name. 
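 * alg_find_test() locates entries in this table with a binary search,
 * so an entry that is out of order will simply not be found.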
*/ 1481 static const struct alg_test_desc alg_test_descs[] = { 1482 { 1483 .alg = "ansi_cprng", 1484 .test = alg_test_cprng, 1485 .fips_allowed = 1, 1486 .suite = { 1487 .cprng = { 1488 .vecs = ansi_cprng_aes_tv_template, 1489 .count = ANSI_CPRNG_AES_TEST_VECTORS 1490 } 1491 } 1492 }, { 1493 .alg = "cbc(aes)", 1494 .test = alg_test_skcipher, 1495 .fips_allowed = 1, 1496 .suite = { 1497 .cipher = { 1498 .enc = { 1499 .vecs = aes_cbc_enc_tv_template, 1500 .count = AES_CBC_ENC_TEST_VECTORS 1501 }, 1502 .dec = { 1503 .vecs = aes_cbc_dec_tv_template, 1504 .count = AES_CBC_DEC_TEST_VECTORS 1505 } 1506 } 1507 } 1508 }, { 1509 .alg = "cbc(anubis)", 1510 .test = alg_test_skcipher, 1511 .suite = { 1512 .cipher = { 1513 .enc = { 1514 .vecs = anubis_cbc_enc_tv_template, 1515 .count = ANUBIS_CBC_ENC_TEST_VECTORS 1516 }, 1517 .dec = { 1518 .vecs = anubis_cbc_dec_tv_template, 1519 .count = ANUBIS_CBC_DEC_TEST_VECTORS 1520 } 1521 } 1522 } 1523 }, { 1524 .alg = "cbc(blowfish)", 1525 .test = alg_test_skcipher, 1526 .suite = { 1527 .cipher = { 1528 .enc = { 1529 .vecs = bf_cbc_enc_tv_template, 1530 .count = BF_CBC_ENC_TEST_VECTORS 1531 }, 1532 .dec = { 1533 .vecs = bf_cbc_dec_tv_template, 1534 .count = BF_CBC_DEC_TEST_VECTORS 1535 } 1536 } 1537 } 1538 }, { 1539 .alg = "cbc(camellia)", 1540 .test = alg_test_skcipher, 1541 .suite = { 1542 .cipher = { 1543 .enc = { 1544 .vecs = camellia_cbc_enc_tv_template, 1545 .count = CAMELLIA_CBC_ENC_TEST_VECTORS 1546 }, 1547 .dec = { 1548 .vecs = camellia_cbc_dec_tv_template, 1549 .count = CAMELLIA_CBC_DEC_TEST_VECTORS 1550 } 1551 } 1552 } 1553 }, { 1554 .alg = "cbc(des)", 1555 .test = alg_test_skcipher, 1556 .suite = { 1557 .cipher = { 1558 .enc = { 1559 .vecs = des_cbc_enc_tv_template, 1560 .count = DES_CBC_ENC_TEST_VECTORS 1561 }, 1562 .dec = { 1563 .vecs = des_cbc_dec_tv_template, 1564 .count = DES_CBC_DEC_TEST_VECTORS 1565 } 1566 } 1567 } 1568 }, { 1569 .alg = "cbc(des3_ede)", 1570 .test = alg_test_skcipher, 1571 .fips_allowed = 1, 1572 .suite = { 1573 .cipher = { 1574 .enc = { 1575 .vecs = des3_ede_cbc_enc_tv_template, 1576 .count = DES3_EDE_CBC_ENC_TEST_VECTORS 1577 }, 1578 .dec = { 1579 .vecs = des3_ede_cbc_dec_tv_template, 1580 .count = DES3_EDE_CBC_DEC_TEST_VECTORS 1581 } 1582 } 1583 } 1584 }, { 1585 .alg = "cbc(twofish)", 1586 .test = alg_test_skcipher, 1587 .suite = { 1588 .cipher = { 1589 .enc = { 1590 .vecs = tf_cbc_enc_tv_template, 1591 .count = TF_CBC_ENC_TEST_VECTORS 1592 }, 1593 .dec = { 1594 .vecs = tf_cbc_dec_tv_template, 1595 .count = TF_CBC_DEC_TEST_VECTORS 1596 } 1597 } 1598 } 1599 }, { 1600 .alg = "ccm(aes)", 1601 .test = alg_test_aead, 1602 .fips_allowed = 1, 1603 .suite = { 1604 .aead = { 1605 .enc = { 1606 .vecs = aes_ccm_enc_tv_template, 1607 .count = AES_CCM_ENC_TEST_VECTORS 1608 }, 1609 .dec = { 1610 .vecs = aes_ccm_dec_tv_template, 1611 .count = AES_CCM_DEC_TEST_VECTORS 1612 } 1613 } 1614 } 1615 }, { 1616 .alg = "crc32c", 1617 .test = alg_test_crc32c, 1618 .fips_allowed = 1, 1619 .suite = { 1620 .hash = { 1621 .vecs = crc32c_tv_template, 1622 .count = CRC32C_TEST_VECTORS 1623 } 1624 } 1625 }, { 1626 .alg = "ctr(aes)", 1627 .test = alg_test_skcipher, 1628 .fips_allowed = 1, 1629 .suite = { 1630 .cipher = { 1631 .enc = { 1632 .vecs = aes_ctr_enc_tv_template, 1633 .count = AES_CTR_ENC_TEST_VECTORS 1634 }, 1635 .dec = { 1636 .vecs = aes_ctr_dec_tv_template, 1637 .count = AES_CTR_DEC_TEST_VECTORS 1638 } 1639 } 1640 } 1641 }, { 1642 .alg = "cts(cbc(aes))", 1643 .test = alg_test_skcipher, 1644 .suite = { 1645 .cipher = { 1646 .enc = { 1647 .vecs = 
cts_mode_enc_tv_template, 1648 .count = CTS_MODE_ENC_TEST_VECTORS 1649 }, 1650 .dec = { 1651 .vecs = cts_mode_dec_tv_template, 1652 .count = CTS_MODE_DEC_TEST_VECTORS 1653 } 1654 } 1655 } 1656 }, { 1657 .alg = "deflate", 1658 .test = alg_test_comp, 1659 .suite = { 1660 .comp = { 1661 .comp = { 1662 .vecs = deflate_comp_tv_template, 1663 .count = DEFLATE_COMP_TEST_VECTORS 1664 }, 1665 .decomp = { 1666 .vecs = deflate_decomp_tv_template, 1667 .count = DEFLATE_DECOMP_TEST_VECTORS 1668 } 1669 } 1670 } 1671 }, { 1672 .alg = "ecb(aes)", 1673 .test = alg_test_skcipher, 1674 .fips_allowed = 1, 1675 .suite = { 1676 .cipher = { 1677 .enc = { 1678 .vecs = aes_enc_tv_template, 1679 .count = AES_ENC_TEST_VECTORS 1680 }, 1681 .dec = { 1682 .vecs = aes_dec_tv_template, 1683 .count = AES_DEC_TEST_VECTORS 1684 } 1685 } 1686 } 1687 }, { 1688 .alg = "ecb(anubis)", 1689 .test = alg_test_skcipher, 1690 .suite = { 1691 .cipher = { 1692 .enc = { 1693 .vecs = anubis_enc_tv_template, 1694 .count = ANUBIS_ENC_TEST_VECTORS 1695 }, 1696 .dec = { 1697 .vecs = anubis_dec_tv_template, 1698 .count = ANUBIS_DEC_TEST_VECTORS 1699 } 1700 } 1701 } 1702 }, { 1703 .alg = "ecb(arc4)", 1704 .test = alg_test_skcipher, 1705 .suite = { 1706 .cipher = { 1707 .enc = { 1708 .vecs = arc4_enc_tv_template, 1709 .count = ARC4_ENC_TEST_VECTORS 1710 }, 1711 .dec = { 1712 .vecs = arc4_dec_tv_template, 1713 .count = ARC4_DEC_TEST_VECTORS 1714 } 1715 } 1716 } 1717 }, { 1718 .alg = "ecb(blowfish)", 1719 .test = alg_test_skcipher, 1720 .suite = { 1721 .cipher = { 1722 .enc = { 1723 .vecs = bf_enc_tv_template, 1724 .count = BF_ENC_TEST_VECTORS 1725 }, 1726 .dec = { 1727 .vecs = bf_dec_tv_template, 1728 .count = BF_DEC_TEST_VECTORS 1729 } 1730 } 1731 } 1732 }, { 1733 .alg = "ecb(camellia)", 1734 .test = alg_test_skcipher, 1735 .suite = { 1736 .cipher = { 1737 .enc = { 1738 .vecs = camellia_enc_tv_template, 1739 .count = CAMELLIA_ENC_TEST_VECTORS 1740 }, 1741 .dec = { 1742 .vecs = camellia_dec_tv_template, 1743 .count = CAMELLIA_DEC_TEST_VECTORS 1744 } 1745 } 1746 } 1747 }, { 1748 .alg = "ecb(cast5)", 1749 .test = alg_test_skcipher, 1750 .suite = { 1751 .cipher = { 1752 .enc = { 1753 .vecs = cast5_enc_tv_template, 1754 .count = CAST5_ENC_TEST_VECTORS 1755 }, 1756 .dec = { 1757 .vecs = cast5_dec_tv_template, 1758 .count = CAST5_DEC_TEST_VECTORS 1759 } 1760 } 1761 } 1762 }, { 1763 .alg = "ecb(cast6)", 1764 .test = alg_test_skcipher, 1765 .suite = { 1766 .cipher = { 1767 .enc = { 1768 .vecs = cast6_enc_tv_template, 1769 .count = CAST6_ENC_TEST_VECTORS 1770 }, 1771 .dec = { 1772 .vecs = cast6_dec_tv_template, 1773 .count = CAST6_DEC_TEST_VECTORS 1774 } 1775 } 1776 } 1777 }, { 1778 .alg = "ecb(des)", 1779 .test = alg_test_skcipher, 1780 .fips_allowed = 1, 1781 .suite = { 1782 .cipher = { 1783 .enc = { 1784 .vecs = des_enc_tv_template, 1785 .count = DES_ENC_TEST_VECTORS 1786 }, 1787 .dec = { 1788 .vecs = des_dec_tv_template, 1789 .count = DES_DEC_TEST_VECTORS 1790 } 1791 } 1792 } 1793 }, { 1794 .alg = "ecb(des3_ede)", 1795 .test = alg_test_skcipher, 1796 .fips_allowed = 1, 1797 .suite = { 1798 .cipher = { 1799 .enc = { 1800 .vecs = des3_ede_enc_tv_template, 1801 .count = DES3_EDE_ENC_TEST_VECTORS 1802 }, 1803 .dec = { 1804 .vecs = des3_ede_dec_tv_template, 1805 .count = DES3_EDE_DEC_TEST_VECTORS 1806 } 1807 } 1808 } 1809 }, { 1810 .alg = "ecb(khazad)", 1811 .test = alg_test_skcipher, 1812 .suite = { 1813 .cipher = { 1814 .enc = { 1815 .vecs = khazad_enc_tv_template, 1816 .count = KHAZAD_ENC_TEST_VECTORS 1817 }, 1818 .dec = { 1819 .vecs = 
khazad_dec_tv_template, 1820 .count = KHAZAD_DEC_TEST_VECTORS 1821 } 1822 } 1823 } 1824 }, { 1825 .alg = "ecb(seed)", 1826 .test = alg_test_skcipher, 1827 .suite = { 1828 .cipher = { 1829 .enc = { 1830 .vecs = seed_enc_tv_template, 1831 .count = SEED_ENC_TEST_VECTORS 1832 }, 1833 .dec = { 1834 .vecs = seed_dec_tv_template, 1835 .count = SEED_DEC_TEST_VECTORS 1836 } 1837 } 1838 } 1839 }, { 1840 .alg = "ecb(serpent)", 1841 .test = alg_test_skcipher, 1842 .suite = { 1843 .cipher = { 1844 .enc = { 1845 .vecs = serpent_enc_tv_template, 1846 .count = SERPENT_ENC_TEST_VECTORS 1847 }, 1848 .dec = { 1849 .vecs = serpent_dec_tv_template, 1850 .count = SERPENT_DEC_TEST_VECTORS 1851 } 1852 } 1853 } 1854 }, { 1855 .alg = "ecb(tea)", 1856 .test = alg_test_skcipher, 1857 .suite = { 1858 .cipher = { 1859 .enc = { 1860 .vecs = tea_enc_tv_template, 1861 .count = TEA_ENC_TEST_VECTORS 1862 }, 1863 .dec = { 1864 .vecs = tea_dec_tv_template, 1865 .count = TEA_DEC_TEST_VECTORS 1866 } 1867 } 1868 } 1869 }, { 1870 .alg = "ecb(tnepres)", 1871 .test = alg_test_skcipher, 1872 .suite = { 1873 .cipher = { 1874 .enc = { 1875 .vecs = tnepres_enc_tv_template, 1876 .count = TNEPRES_ENC_TEST_VECTORS 1877 }, 1878 .dec = { 1879 .vecs = tnepres_dec_tv_template, 1880 .count = TNEPRES_DEC_TEST_VECTORS 1881 } 1882 } 1883 } 1884 }, { 1885 .alg = "ecb(twofish)", 1886 .test = alg_test_skcipher, 1887 .suite = { 1888 .cipher = { 1889 .enc = { 1890 .vecs = tf_enc_tv_template, 1891 .count = TF_ENC_TEST_VECTORS 1892 }, 1893 .dec = { 1894 .vecs = tf_dec_tv_template, 1895 .count = TF_DEC_TEST_VECTORS 1896 } 1897 } 1898 } 1899 }, { 1900 .alg = "ecb(xeta)", 1901 .test = alg_test_skcipher, 1902 .suite = { 1903 .cipher = { 1904 .enc = { 1905 .vecs = xeta_enc_tv_template, 1906 .count = XETA_ENC_TEST_VECTORS 1907 }, 1908 .dec = { 1909 .vecs = xeta_dec_tv_template, 1910 .count = XETA_DEC_TEST_VECTORS 1911 } 1912 } 1913 } 1914 }, { 1915 .alg = "ecb(xtea)", 1916 .test = alg_test_skcipher, 1917 .suite = { 1918 .cipher = { 1919 .enc = { 1920 .vecs = xtea_enc_tv_template, 1921 .count = XTEA_ENC_TEST_VECTORS 1922 }, 1923 .dec = { 1924 .vecs = xtea_dec_tv_template, 1925 .count = XTEA_DEC_TEST_VECTORS 1926 } 1927 } 1928 } 1929 }, { 1930 .alg = "gcm(aes)", 1931 .test = alg_test_aead, 1932 .fips_allowed = 1, 1933 .suite = { 1934 .aead = { 1935 .enc = { 1936 .vecs = aes_gcm_enc_tv_template, 1937 .count = AES_GCM_ENC_TEST_VECTORS 1938 }, 1939 .dec = { 1940 .vecs = aes_gcm_dec_tv_template, 1941 .count = AES_GCM_DEC_TEST_VECTORS 1942 } 1943 } 1944 } 1945 }, { 1946 .alg = "ghash", 1947 .test = alg_test_hash, 1948 .suite = { 1949 .hash = { 1950 .vecs = ghash_tv_template, 1951 .count = GHASH_TEST_VECTORS 1952 } 1953 } 1954 }, { 1955 .alg = "hmac(md5)", 1956 .test = alg_test_hash, 1957 .suite = { 1958 .hash = { 1959 .vecs = hmac_md5_tv_template, 1960 .count = HMAC_MD5_TEST_VECTORS 1961 } 1962 } 1963 }, { 1964 .alg = "hmac(rmd128)", 1965 .test = alg_test_hash, 1966 .suite = { 1967 .hash = { 1968 .vecs = hmac_rmd128_tv_template, 1969 .count = HMAC_RMD128_TEST_VECTORS 1970 } 1971 } 1972 }, { 1973 .alg = "hmac(rmd160)", 1974 .test = alg_test_hash, 1975 .suite = { 1976 .hash = { 1977 .vecs = hmac_rmd160_tv_template, 1978 .count = HMAC_RMD160_TEST_VECTORS 1979 } 1980 } 1981 }, { 1982 .alg = "hmac(sha1)", 1983 .test = alg_test_hash, 1984 .fips_allowed = 1, 1985 .suite = { 1986 .hash = { 1987 .vecs = hmac_sha1_tv_template, 1988 .count = HMAC_SHA1_TEST_VECTORS 1989 } 1990 } 1991 }, { 1992 .alg = "hmac(sha224)", 1993 .test = alg_test_hash, 1994 .fips_allowed = 1, 1995 
.suite = { 1996 .hash = { 1997 .vecs = hmac_sha224_tv_template, 1998 .count = HMAC_SHA224_TEST_VECTORS 1999 } 2000 } 2001 }, { 2002 .alg = "hmac(sha256)", 2003 .test = alg_test_hash, 2004 .fips_allowed = 1, 2005 .suite = { 2006 .hash = { 2007 .vecs = hmac_sha256_tv_template, 2008 .count = HMAC_SHA256_TEST_VECTORS 2009 } 2010 } 2011 }, { 2012 .alg = "hmac(sha384)", 2013 .test = alg_test_hash, 2014 .fips_allowed = 1, 2015 .suite = { 2016 .hash = { 2017 .vecs = hmac_sha384_tv_template, 2018 .count = HMAC_SHA384_TEST_VECTORS 2019 } 2020 } 2021 }, { 2022 .alg = "hmac(sha512)", 2023 .test = alg_test_hash, 2024 .fips_allowed = 1, 2025 .suite = { 2026 .hash = { 2027 .vecs = hmac_sha512_tv_template, 2028 .count = HMAC_SHA512_TEST_VECTORS 2029 } 2030 } 2031 }, { 2032 .alg = "lrw(aes)", 2033 .test = alg_test_skcipher, 2034 .suite = { 2035 .cipher = { 2036 .enc = { 2037 .vecs = aes_lrw_enc_tv_template, 2038 .count = AES_LRW_ENC_TEST_VECTORS 2039 }, 2040 .dec = { 2041 .vecs = aes_lrw_dec_tv_template, 2042 .count = AES_LRW_DEC_TEST_VECTORS 2043 } 2044 } 2045 } 2046 }, { 2047 .alg = "lzo", 2048 .test = alg_test_comp, 2049 .suite = { 2050 .comp = { 2051 .comp = { 2052 .vecs = lzo_comp_tv_template, 2053 .count = LZO_COMP_TEST_VECTORS 2054 }, 2055 .decomp = { 2056 .vecs = lzo_decomp_tv_template, 2057 .count = LZO_DECOMP_TEST_VECTORS 2058 } 2059 } 2060 } 2061 }, { 2062 .alg = "md4", 2063 .test = alg_test_hash, 2064 .suite = { 2065 .hash = { 2066 .vecs = md4_tv_template, 2067 .count = MD4_TEST_VECTORS 2068 } 2069 } 2070 }, { 2071 .alg = "md5", 2072 .test = alg_test_hash, 2073 .suite = { 2074 .hash = { 2075 .vecs = md5_tv_template, 2076 .count = MD5_TEST_VECTORS 2077 } 2078 } 2079 }, { 2080 .alg = "michael_mic", 2081 .test = alg_test_hash, 2082 .suite = { 2083 .hash = { 2084 .vecs = michael_mic_tv_template, 2085 .count = MICHAEL_MIC_TEST_VECTORS 2086 } 2087 } 2088 }, { 2089 .alg = "pcbc(fcrypt)", 2090 .test = alg_test_skcipher, 2091 .suite = { 2092 .cipher = { 2093 .enc = { 2094 .vecs = fcrypt_pcbc_enc_tv_template, 2095 .count = FCRYPT_ENC_TEST_VECTORS 2096 }, 2097 .dec = { 2098 .vecs = fcrypt_pcbc_dec_tv_template, 2099 .count = FCRYPT_DEC_TEST_VECTORS 2100 } 2101 } 2102 } 2103 }, { 2104 .alg = "rfc3686(ctr(aes))", 2105 .test = alg_test_skcipher, 2106 .fips_allowed = 1, 2107 .suite = { 2108 .cipher = { 2109 .enc = { 2110 .vecs = aes_ctr_rfc3686_enc_tv_template, 2111 .count = AES_CTR_3686_ENC_TEST_VECTORS 2112 }, 2113 .dec = { 2114 .vecs = aes_ctr_rfc3686_dec_tv_template, 2115 .count = AES_CTR_3686_DEC_TEST_VECTORS 2116 } 2117 } 2118 } 2119 }, { 2120 .alg = "rfc4309(ccm(aes))", 2121 .test = alg_test_aead, 2122 .fips_allowed = 1, 2123 .suite = { 2124 .aead = { 2125 .enc = { 2126 .vecs = aes_ccm_rfc4309_enc_tv_template, 2127 .count = AES_CCM_4309_ENC_TEST_VECTORS 2128 }, 2129 .dec = { 2130 .vecs = aes_ccm_rfc4309_dec_tv_template, 2131 .count = AES_CCM_4309_DEC_TEST_VECTORS 2132 } 2133 } 2134 } 2135 }, { 2136 .alg = "rmd128", 2137 .test = alg_test_hash, 2138 .suite = { 2139 .hash = { 2140 .vecs = rmd128_tv_template, 2141 .count = RMD128_TEST_VECTORS 2142 } 2143 } 2144 }, { 2145 .alg = "rmd160", 2146 .test = alg_test_hash, 2147 .suite = { 2148 .hash = { 2149 .vecs = rmd160_tv_template, 2150 .count = RMD160_TEST_VECTORS 2151 } 2152 } 2153 }, { 2154 .alg = "rmd256", 2155 .test = alg_test_hash, 2156 .suite = { 2157 .hash = { 2158 .vecs = rmd256_tv_template, 2159 .count = RMD256_TEST_VECTORS 2160 } 2161 } 2162 }, { 2163 .alg = "rmd320", 2164 .test = alg_test_hash, 2165 .suite = { 2166 .hash = { 2167 .vecs = 
rmd320_tv_template, 2168 .count = RMD320_TEST_VECTORS 2169 } 2170 } 2171 }, { 2172 .alg = "salsa20", 2173 .test = alg_test_skcipher, 2174 .suite = { 2175 .cipher = { 2176 .enc = { 2177 .vecs = salsa20_stream_enc_tv_template, 2178 .count = SALSA20_STREAM_ENC_TEST_VECTORS 2179 } 2180 } 2181 } 2182 }, { 2183 .alg = "sha1", 2184 .test = alg_test_hash, 2185 .fips_allowed = 1, 2186 .suite = { 2187 .hash = { 2188 .vecs = sha1_tv_template, 2189 .count = SHA1_TEST_VECTORS 2190 } 2191 } 2192 }, { 2193 .alg = "sha224", 2194 .test = alg_test_hash, 2195 .fips_allowed = 1, 2196 .suite = { 2197 .hash = { 2198 .vecs = sha224_tv_template, 2199 .count = SHA224_TEST_VECTORS 2200 } 2201 } 2202 }, { 2203 .alg = "sha256", 2204 .test = alg_test_hash, 2205 .fips_allowed = 1, 2206 .suite = { 2207 .hash = { 2208 .vecs = sha256_tv_template, 2209 .count = SHA256_TEST_VECTORS 2210 } 2211 } 2212 }, { 2213 .alg = "sha384", 2214 .test = alg_test_hash, 2215 .fips_allowed = 1, 2216 .suite = { 2217 .hash = { 2218 .vecs = sha384_tv_template, 2219 .count = SHA384_TEST_VECTORS 2220 } 2221 } 2222 }, { 2223 .alg = "sha512", 2224 .test = alg_test_hash, 2225 .fips_allowed = 1, 2226 .suite = { 2227 .hash = { 2228 .vecs = sha512_tv_template, 2229 .count = SHA512_TEST_VECTORS 2230 } 2231 } 2232 }, { 2233 .alg = "tgr128", 2234 .test = alg_test_hash, 2235 .suite = { 2236 .hash = { 2237 .vecs = tgr128_tv_template, 2238 .count = TGR128_TEST_VECTORS 2239 } 2240 } 2241 }, { 2242 .alg = "tgr160", 2243 .test = alg_test_hash, 2244 .suite = { 2245 .hash = { 2246 .vecs = tgr160_tv_template, 2247 .count = TGR160_TEST_VECTORS 2248 } 2249 } 2250 }, { 2251 .alg = "tgr192", 2252 .test = alg_test_hash, 2253 .suite = { 2254 .hash = { 2255 .vecs = tgr192_tv_template, 2256 .count = TGR192_TEST_VECTORS 2257 } 2258 } 2259 }, { 2260 .alg = "vmac(aes)", 2261 .test = alg_test_hash, 2262 .suite = { 2263 .hash = { 2264 .vecs = aes_vmac128_tv_template, 2265 .count = VMAC_AES_TEST_VECTORS 2266 } 2267 } 2268 }, { 2269 .alg = "wp256", 2270 .test = alg_test_hash, 2271 .suite = { 2272 .hash = { 2273 .vecs = wp256_tv_template, 2274 .count = WP256_TEST_VECTORS 2275 } 2276 } 2277 }, { 2278 .alg = "wp384", 2279 .test = alg_test_hash, 2280 .suite = { 2281 .hash = { 2282 .vecs = wp384_tv_template, 2283 .count = WP384_TEST_VECTORS 2284 } 2285 } 2286 }, { 2287 .alg = "wp512", 2288 .test = alg_test_hash, 2289 .suite = { 2290 .hash = { 2291 .vecs = wp512_tv_template, 2292 .count = WP512_TEST_VECTORS 2293 } 2294 } 2295 }, { 2296 .alg = "xcbc(aes)", 2297 .test = alg_test_hash, 2298 .suite = { 2299 .hash = { 2300 .vecs = aes_xcbc128_tv_template, 2301 .count = XCBC_AES_TEST_VECTORS 2302 } 2303 } 2304 }, { 2305 .alg = "xts(aes)", 2306 .test = alg_test_skcipher, 2307 .suite = { 2308 .cipher = { 2309 .enc = { 2310 .vecs = aes_xts_enc_tv_template, 2311 .count = AES_XTS_ENC_TEST_VECTORS 2312 }, 2313 .dec = { 2314 .vecs = aes_xts_dec_tv_template, 2315 .count = AES_XTS_DEC_TEST_VECTORS 2316 } 2317 } 2318 } 2319 }, { 2320 .alg = "zlib", 2321 .test = alg_test_pcomp, 2322 .suite = { 2323 .pcomp = { 2324 .comp = { 2325 .vecs = zlib_comp_tv_template, 2326 .count = ZLIB_COMP_TEST_VECTORS 2327 }, 2328 .decomp = { 2329 .vecs = zlib_decomp_tv_template, 2330 .count = ZLIB_DECOMP_TEST_VECTORS 2331 } 2332 } 2333 } 2334 } 2335 }; 2336 2337 static int alg_find_test(const char *alg) 2338 { 2339 int start = 0; 2340 int end = ARRAY_SIZE(alg_test_descs); 2341 2342 while (start < end) { 2343 int i = (start + end) / 2; 2344 int diff = strcmp(alg_test_descs[i].alg, alg); 2345 2346 if (diff > 0) { 2347 end = 
i; 2348 continue; 2349 } 2350 2351 if (diff < 0) { 2352 start = i + 1; 2353 continue; 2354 } 2355 2356 return i; 2357 } 2358 2359 return -1; 2360 } 2361 2362 int alg_test(const char *driver, const char *alg, u32 type, u32 mask) 2363 { 2364 int i; 2365 int j; 2366 int rc; 2367 2368 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) { 2369 char nalg[CRYPTO_MAX_ALG_NAME]; 2370 2371 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >= 2372 sizeof(nalg)) 2373 return -ENAMETOOLONG; 2374 2375 i = alg_find_test(nalg); 2376 if (i < 0) 2377 goto notest; 2378 2379 if (fips_enabled && !alg_test_descs[i].fips_allowed) 2380 goto non_fips_alg; 2381 2382 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask); 2383 goto test_done; 2384 } 2385 2386 i = alg_find_test(alg); 2387 j = alg_find_test(driver); 2388 if (i < 0 && j < 0) 2389 goto notest; 2390 2391 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) || 2392 (j >= 0 && !alg_test_descs[j].fips_allowed))) 2393 goto non_fips_alg; 2394 2395 rc = 0; 2396 if (i >= 0) 2397 rc |= alg_test_descs[i].test(alg_test_descs + i, driver, 2398 type, mask); 2399 if (j >= 0) 2400 rc |= alg_test_descs[j].test(alg_test_descs + j, driver, 2401 type, mask); 2402 2403 test_done: 2404 if (fips_enabled && rc) 2405 panic("%s: %s alg self test failed in fips mode!\n", driver, alg); 2406 2407 if (fips_enabled && !rc) 2408 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n", 2409 driver, alg); 2410 2411 return rc; 2412 2413 notest: 2414 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver); 2415 return 0; 2416 non_fips_alg: 2417 return -EINVAL; 2418 } 2419 EXPORT_SYMBOL_GPL(alg_test); 2420
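/*
 * Illustrative usage only: the crypto manager runs these self-tests when a
 * new algorithm is registered, roughly as sketched below.  "cbc-aes-foo" is
 * a hypothetical driver name; a real caller passes the registered
 * algorithm's cra_driver_name/cra_name together with its type/mask flags.
 *
 *	err = alg_test("cbc-aes-foo", "cbc(aes)", 0, 0);
 *	if (err)
 *		pr_err("alg: self-test for cbc(aes) failed: %d\n", err);
 *
 * A return value of 0 means either that every configured vector passed or
 * that no test entry exists for the algorithm (the "notest" case above,
 * which only logs a notice); in FIPS mode a failing self-test panics
 * instead of returning.
 */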