/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"
#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

struct tcrypt_result {
	struct completion completion;
	int err;
};

struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
	} suite;
};

static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

static char *xbuf[XBUFSIZE];
static char *axbuf[XBUFSIZE];

static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}

static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
		     unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
	unsigned int i, j, k, temp;
	struct scatterlist sg[8];
	char result[64];
	struct ahash_request *req;
	struct tcrypt_result tresult;
	int ret;
	void *hash_buff;

	init_completion(&tresult.completion);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: hash: Failed to allocate request for "
		       "%s\n", algo);
		ret = -ENOMEM;
		goto out_noreq;
	}
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   tcrypt_complete, &tresult);

	for (i = 0; i < tcount; i++) {
		memset(result, 0, 64);

		hash_buff = xbuf[0];

		memcpy(hash_buff, template[i].plaintext, template[i].psize);
		sg_init_one(&sg[0], hash_buff, template[i].psize);

		if (template[i].ksize) {
			crypto_ahash_clear_flags(tfm, ~0);
			ret = crypto_ahash_setkey(tfm, template[i].key,
						  template[i].ksize);
			if (ret) {
				printk(KERN_ERR "alg: hash: setkey failed on "
				       "test %d for %s: ret=%d\n", i + 1, algo,
				       -ret);
				goto out;
			}
		}

		ahash_request_set_crypt(req, sg, result, template[i].psize);
		ret = crypto_ahash_digest(req);
		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			ret = wait_for_completion_interruptible(
				&tresult.completion);
			if (!ret && !(ret = tresult.err)) {
				INIT_COMPLETION(tresult.completion);
				break;
			}
			/* fall through */
		default:
			printk(KERN_ERR "alg: hash: digest failed on test %d "
			       "for %s: ret=%d\n", i + 1, algo, -ret);
			goto out;
		}

		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
			       i + 1, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;
			memset(result, 0, 64);

			temp = 0;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				sg_set_buf(&sg[k],
					   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].plaintext + temp,
						  template[i].tap[k]),
					   template[i].tap[k]);
				temp += template[i].tap[k];
			}

			if (template[i].ksize) {
				crypto_ahash_clear_flags(tfm, ~0);
				ret = crypto_ahash_setkey(tfm, template[i].key,
							  template[i].ksize);

				if (ret) {
					printk(KERN_ERR "alg: hash: setkey "
					       "failed on chunking test %d "
					       "for %s: ret=%d\n", j, algo,
					       -ret);
					goto out;
				}
			}

			ahash_request_set_crypt(req, sg, result,
						template[i].psize);
			ret = crypto_ahash_digest(req);
			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&tresult.completion);
				if (!ret && !(ret = tresult.err)) {
					INIT_COMPLETION(tresult.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: hash: digest failed "
				       "on chunking test %d for %s: "
				       "ret=%d\n", j, algo, -ret);
				goto out;
			}

			if (memcmp(result, template[i].digest,
				   crypto_ahash_digestsize(tfm))) {
				printk(KERN_ERR "alg: hash: Chunking test %d "
				       "failed for %s\n", j, algo);
				hexdump(result, crypto_ahash_digestsize(tfm));
				ret = -EINVAL;
				goto out;
			}
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	return ret;
}

static int test_aead(struct crypto_aead *tfm, int enc,
		     struct aead_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = 0;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist sg[8];
	struct scatterlist asg[8];
	const char *e;
	struct tcrypt_result result;
	unsigned int authsize;
	void *input;
	void *assoc;
	char iv[MAX_IVLEN];

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: aead: Failed to allocate request for "
		       "%s\n", algo);
		ret = -ENOMEM;
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	for (i = 0, j = 0; i < tcount; i++) {
		if (!template[i].np) {
			j++;

			/* some templates have no input data but they will
			 * touch input
			 */
			input = xbuf[0];
			assoc = axbuf[0];

			memcpy(input, template[i].input, template[i].ilen);
			memcpy(assoc, template[i].assoc, template[i].alen);
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key,
						 template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "test %d for %s: flags=%x\n", j, algo,
				       crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);
			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on test %d for %s\n",
				       authsize, j, algo);
				goto out;
			}

			sg_init_one(&sg[0], input,
				    template[i].ilen + (enc ? authsize : 0));

			sg_init_one(&asg[0], assoc, template[i].alen);

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on test "
				       "%d for %s: ret=%d\n", e, j, algo, -ret);
				goto out;
			}

			q = input;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: aead: Test %d failed on "
				       "%s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np) {
			j++;

			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key, template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: aead: setkey failed on "
				       "chunk test %d for %s: flags=%x\n", j,
				       algo, crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);

			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0, temp = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1 && enc)
					n += authsize;
				if (offset_in_page(q) + n < PAGE_SIZE)
					q[n] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);
				temp += template[i].tap[k];
			}

			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				printk(KERN_ERR "alg: aead: Failed to set "
				       "authsize to %u on chunk test %d for "
				       "%s\n", authsize, j, algo);
				goto out;
			}

			if (enc) {
				if (WARN_ON(sg[k - 1].offset +
					    sg[k - 1].length + authsize >
					    PAGE_SIZE)) {
					ret = -EINVAL;
					goto out;
				}

				sg[k - 1].length += authsize;
			}

			sg_init_table(asg, template[i].anp);
			for (k = 0, temp = 0; k < template[i].anp; k++) {
				sg_set_buf(&asg[k],
					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].assoc + temp,
						  template[i].atap[k]),
					   template[i].atap[k]);
				temp += template[i].atap[k];
			}

			aead_request_set_crypt(req, sg, sg,
					       template[i].ilen,
					       iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: aead: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1)
					n += enc ? authsize : -authsize;

				if (memcmp(q, template[i].result + temp, n)) {
					printk(KERN_ERR "alg: aead: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, n);
					goto out;
				}

				q += n;
				if (k == template[i].np - 1 && !enc) {
					if (memcmp(q, template[i].input +
						      temp + n, authsize))
						n = authsize;
					else
						n = 0;
				} else {
					for (n = 0; offset_in_page(q + n) &&
						    q[n]; n++)
						;
				}
				if (n) {
					printk(KERN_ERR "alg: aead: Result "
					       "buffer corruption in chunk "
					       "test %d on %s at page %u for "
					       "%s: %u bytes:\n", j, e, k,
					       algo, n);
					hexdump(q, n);
					goto out;
				}

				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	return ret;
}

static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	int ret;
	char *q;
	const char *e;
	void *data;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;

		data = xbuf[0];
		memcpy(data, template[i].input, template[i].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (!ret == template[i].fail) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		for (k = 0; k < template[i].ilen;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}

static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
			 struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret;
	char *q;
	struct ablkcipher_request *req;
	struct scatterlist sg[8];
	const char *e;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		printk(KERN_ERR "alg: skcipher: Failed to allocate request "
		       "for %s\n", algo);
		ret = -ENOMEM;
		goto out;
	}

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					tcrypt_complete, &result);

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (!(template[i].np)) {
			j++;

			data = xbuf[0];
			memcpy(data, template[i].input, template[i].ilen);

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on test %d for %s: flags=%x\n", j,
				       algo, crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			sg_init_one(&sg[0], data, template[i].ilen);

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);
			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "test %d for %s: ret=%d\n", e, j, algo,
				       -ret);
				goto out;
			}

			q = data;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				printk(KERN_ERR "alg: skcipher: Test %d "
				       "failed on %s for %s\n", j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	j = 0;
	for (i = 0; i < tcount; i++) {

		if (template[i].iv)
			memcpy(iv, template[i].iv, MAX_IVLEN);
		else
			memset(iv, 0, MAX_IVLEN);

		if (template[i].np) {
			j++;

			crypto_ablkcipher_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_ablkcipher_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
						       template[i].klen);
			if (!ret == template[i].fail) {
				printk(KERN_ERR "alg: skcipher: setkey failed "
				       "on chunk test %d for %s: flags=%x\n",
				       j, algo,
				       crypto_ablkcipher_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			temp = 0;
			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			for (k = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				if (offset_in_page(q) + template[i].tap[k] <
				    PAGE_SIZE)
					q[template[i].tap[k]] = 0;

				sg_set_buf(&sg[k], q, template[i].tap[k]);

				temp += template[i].tap[k];
			}

			ablkcipher_request_set_crypt(req, sg, sg,
						     template[i].ilen, iv);

			ret = enc ?
				crypto_ablkcipher_encrypt(req) :
				crypto_ablkcipher_decrypt(req);

			switch (ret) {
			case 0:
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !((ret = result.err))) {
					INIT_COMPLETION(result.completion);
					break;
				}
				/* fall through */
			default:
				printk(KERN_ERR "alg: skcipher: %s failed on "
				       "chunk test %d for %s: ret=%d\n", e, j,
				       algo, -ret);
				goto out;
			}

			temp = 0;
			ret = -EINVAL;
			for (k = 0; k < template[i].np; k++) {
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				if (memcmp(q, template[i].result + temp,
					   template[i].tap[k])) {
					printk(KERN_ERR "alg: skcipher: Chunk "
					       "test %d failed on %s at page "
					       "%u for %s\n", j, e, k, algo);
					hexdump(q, template[i].tap[k]);
					goto out;
				}

				q += template[i].tap[k];
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
				if (n) {
					printk(KERN_ERR "alg: skcipher: "
					       "Result buffer corruption in "
					       "chunk test %d on %s at page "
					       "%u for %s: %u bytes:\n", j, e,
					       k, algo, n);
					hexdump(q, n);
					goto out;
				}
				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	ablkcipher_request_free(req);
	return ret;
}

static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int ret;

	for (i = 0; i < ctcount; i++) {
		int ilen, dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != ctemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, ctemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen, dlen = COMP_BUF_SIZE;

		memset(result, 0, sizeof (result));

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, result, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(result, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(result, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	return ret;
}
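/*
 * Note on test_pcomp() below: it exercises the partial (streaming)
 * compression interface.  Each vector is deliberately fed in two halves,
 * with only part of the output buffer exposed at first, so the
 * crypto_{,de}compress_update() paths are covered before the final call.
 * An -EAGAIN from an update is only tolerated once all queued input has
 * been consumed.
 */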
static int test_pcomp(struct crypto_pcomp *tfm,
		      struct pcomp_testvec *ctemplate,
		      struct pcomp_testvec *dtemplate, int ctcount,
		      int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int error;

	for (i = 0; i < ctcount; i++) {
		struct comp_request req;

		error = crypto_compress_setup(tfm, ctemplate[i].params,
					      ctemplate[i].paramsize);
		if (error) {
			pr_err("alg: pcomp: compression setup failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		error = crypto_compress_init(tfm);
		if (error) {
			pr_err("alg: pcomp: compression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		memset(result, 0, sizeof(result));

		req.next_in = ctemplate[i].input;
		req.avail_in = ctemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = ctemplate[i].outlen / 2;

		error = crypto_compress_update(tfm, &req);
		if (error && (error != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		/* Add remaining input data */
		req.avail_in += (ctemplate[i].inlen + 1) / 2;

		error = crypto_compress_update(tfm, &req);
		if (error && (error != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;

		error = crypto_compress_final(tfm, &req);
		if (error) {
			pr_err("alg: pcomp: compression final failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
			pr_err("alg: comp: Compression test %d failed for %s: "
			       "output len = %d (expected %d)\n", i + 1, algo,
			       COMP_BUF_SIZE - req.avail_out,
			       ctemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
			pr_err("alg: pcomp: Compression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, ctemplate[i].outlen);
			return -EINVAL;
		}
	}

	for (i = 0; i < dtcount; i++) {
		struct comp_request req;

		error = crypto_decompress_setup(tfm, dtemplate[i].params,
						dtemplate[i].paramsize);
		if (error) {
			pr_err("alg: pcomp: decompression setup failed on "
			       "test %d for %s: error=%d\n", i + 1, algo,
			       error);
			return error;
		}

		error = crypto_decompress_init(tfm);
		if (error) {
			pr_err("alg: pcomp: decompression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, error);
			return error;
		}

		memset(result, 0, sizeof(result));

		req.next_in = dtemplate[i].input;
		req.avail_in = dtemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = dtemplate[i].outlen / 2;

		error = crypto_decompress_update(tfm, &req);
		if (error && (error != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo,
			       error);
			return error;
		}

		/* Add remaining input data */
		req.avail_in += (dtemplate[i].inlen + 1) / 2;

		error = crypto_decompress_update(tfm, &req);
		if (error && (error != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo,
			       error);
			return error;
		}

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;

		error = crypto_decompress_final(tfm, &req);
		if (error && (error != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression final failed on "
			       "test %d for %s: error=%d\n", i + 1, algo,
			       error);
			return error;
		}

		if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
			pr_err("alg: comp: Decompression test %d failed for "
			       "%s: output len = %d (expected %d)\n", i + 1,
			       algo, COMP_BUF_SIZE - req.avail_out,
			       dtemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
			pr_err("alg: pcomp: Decompression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, dtemplate[i].outlen);
			return -EINVAL;
		}
	}

	return 0;
}

static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_aead *tfm;
	int err = 0;

	tfm = crypto_alloc_aead(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.aead.enc.vecs) {
		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
				desc->suite.aead.enc.count);
		if (err)
			goto out;
	}

	if (!err && desc->suite.aead.dec.vecs)
		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
				desc->suite.aead.dec.count);

out:
	crypto_free_aead(tfm);
	return err;
}

static int alg_test_cipher(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_cipher *tfm;
	int err = 0;

	tfm = crypto_alloc_cipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: cipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				  desc->suite.cipher.enc.count);
		if (err)
			goto out;
	}

	if (desc->suite.cipher.dec.vecs)
		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				  desc->suite.cipher.dec.count);

out:
	crypto_free_cipher(tfm);
	return err;
}

static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	struct crypto_ablkcipher *tfm;
	int err = 0;

	tfm = crypto_alloc_ablkcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
		       "%s: %ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	if (desc->suite.cipher.enc.vecs) {
		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
				    desc->suite.cipher.enc.count);
		if (err)
			goto out;
	}

	if (desc->suite.cipher.dec.vecs)
		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
				    desc->suite.cipher.dec.count);

out:
	crypto_free_ablkcipher(tfm);
	return err;
}
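/*
 * Note: alg_test_cipher() and alg_test_skcipher() above share the same
 * cipher_test_suite vectors.  When alg_test() is asked about a bare
 * CRYPTO_ALG_TYPE_CIPHER it looks up the corresponding "ecb(<alg>)" entry
 * and runs alg_test_cipher() on it, so one set of templates covers both
 * the single-block cipher and the ablkcipher/scatterlist paths.
 */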

static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_comp *tfm;
	int err;

	tfm = crypto_alloc_comp(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_comp(tfm, desc->suite.comp.comp.vecs,
			desc->suite.comp.decomp.vecs,
			desc->suite.comp.comp.count,
			desc->suite.comp.decomp.count);

	crypto_free_comp(tfm);
	return err;
}

static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
			  u32 type, u32 mask)
{
	struct crypto_pcomp *tfm;
	int err;

	tfm = crypto_alloc_pcomp(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
			 desc->suite.pcomp.decomp.vecs,
			 desc->suite.pcomp.comp.count,
			 desc->suite.pcomp.decomp.count);

	crypto_free_pcomp(tfm);
	return err;
}

static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	struct crypto_ahash *tfm;
	int err;

	tfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_hash(tfm, desc->suite.hash.vecs, desc->suite.hash.count);

	crypto_free_ahash(tfm);
	return err;
}

static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	do {
		struct {
			struct shash_desc shash;
			char ctx[crypto_shash_descsize(tfm)];
		} sdesc;

		sdesc.shash.tfm = tfm;
		sdesc.shash.flags = 0;

		/*
		 * Seed the partial crc32c state with a known value and check
		 * that ->final() returns its bitwise complement (crc32c's
		 * final step inverts the running CRC).
		 */
		*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}

/* Please keep this list sorted by algorithm name. */
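/*
 * The descriptor table below is binary-searched by alg_find_test() on the
 * .alg string, which is why it must stay sorted.  A hypothetical entry (for
 * illustration only, not part of the table) would look like:
 *
 *	{
 *		.alg = "cbc(foo)",
 *		.test = alg_test_skcipher,
 *		.suite = {
 *			.cipher = {
 *				.enc = { foo_cbc_enc_tv_template, FOO_ENC_VECS },
 *				.dec = { foo_cbc_dec_tv_template, FOO_DEC_VECS }
 *			}
 *		}
 *	}
 */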
static const struct alg_test_desc alg_test_descs[] = {
	{
		.alg = "cbc(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_cbc_enc_tv_template,
					.count = AES_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_cbc_dec_tv_template,
					.count = AES_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_cbc_enc_tv_template,
					.count = ANUBIS_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_cbc_dec_tv_template,
					.count = ANUBIS_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_cbc_enc_tv_template,
					.count = BF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_cbc_dec_tv_template,
					.count = BF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_cbc_enc_tv_template,
					.count = CAMELLIA_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_cbc_dec_tv_template,
					.count = CAMELLIA_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_cbc_enc_tv_template,
					.count = DES_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_cbc_dec_tv_template,
					.count = DES_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_cbc_enc_tv_template,
					.count = DES3_EDE_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_cbc_dec_tv_template,
					.count = DES3_EDE_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "cbc(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_cbc_enc_tv_template,
					.count = TF_CBC_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_cbc_dec_tv_template,
					.count = TF_CBC_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ccm(aes)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_enc_tv_template,
					.count = AES_CCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_dec_tv_template,
					.count = AES_CCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "crc32c",
		.test = alg_test_crc32c,
		.suite = {
			.hash = {
				.vecs = crc32c_tv_template,
				.count = CRC32C_TEST_VECTORS
			}
		}
	}, {
		.alg = "cts(cbc(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cts_mode_enc_tv_template,
					.count = CTS_MODE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cts_mode_dec_tv_template,
					.count = CTS_MODE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "deflate",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = {
					.vecs = deflate_comp_tv_template,
					.count = DEFLATE_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = deflate_decomp_tv_template,
					.count = DEFLATE_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_enc_tv_template,
					.count = AES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_dec_tv_template,
					.count = AES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = anubis_enc_tv_template,
					.count = ANUBIS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = anubis_dec_tv_template,
					.count = ANUBIS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(arc4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = arc4_enc_tv_template,
					.count = ARC4_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = arc4_dec_tv_template,
					.count = ARC4_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = bf_enc_tv_template,
					.count = BF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = bf_dec_tv_template,
					.count = BF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_enc_tv_template,
					.count = CAMELLIA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_dec_tv_template,
					.count = CAMELLIA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast5_enc_tv_template,
					.count = CAST5_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast5_dec_tv_template,
					.count = CAST5_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_enc_tv_template,
					.count = CAST6_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_dec_tv_template,
					.count = CAST6_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des_enc_tv_template,
					.count = DES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des_dec_tv_template,
					.count = DES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = des3_ede_enc_tv_template,
					.count = DES3_EDE_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = des3_ede_dec_tv_template,
					.count = DES3_EDE_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = khazad_enc_tv_template,
					.count = KHAZAD_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = khazad_dec_tv_template,
					.count = KHAZAD_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = seed_enc_tv_template,
					.count = SEED_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = seed_dec_tv_template,
					.count = SEED_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_enc_tv_template,
					.count = SERPENT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_dec_tv_template,
					.count = SERPENT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tea_enc_tv_template,
					.count = TEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tea_dec_tv_template,
					.count = TEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tnepres_enc_tv_template,
					.count = TNEPRES_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tnepres_dec_tv_template,
					.count = TNEPRES_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_enc_tv_template,
					.count = TF_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_dec_tv_template,
					.count = TF_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xeta_enc_tv_template,
					.count = XETA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xeta_dec_tv_template,
					.count = XETA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = xtea_enc_tv_template,
					.count = XTEA_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = xtea_dec_tv_template,
					.count = XTEA_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "gcm(aes)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_enc_tv_template,
					.count = AES_GCM_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_dec_tv_template,
					.count = AES_GCM_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_md5_tv_template,
				.count = HMAC_MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd128_tv_template,
				.count = HMAC_RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_rmd160_tv_template,
				.count = HMAC_RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha1_tv_template,
				.count = HMAC_SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha224_tv_template,
				.count = HMAC_SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha256_tv_template,
				.count = HMAC_SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha384_tv_template,
				.count = HMAC_SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = hmac_sha512_tv_template,
				.count = HMAC_SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_enc_tv_template,
					.count = AES_CTR_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_dec_tv_template,
					.count = AES_CTR_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr192_tv_template,
				.count = TGR192_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
			}
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_xts_enc_tv_template,
					.count = AES_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_xts_dec_tv_template,
					.count = AES_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "zlib",
		.test = alg_test_pcomp,
		.suite = {
			.pcomp = {
				.comp = {
					.vecs = zlib_comp_tv_template,
					.count = ZLIB_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = zlib_decomp_tv_template,
					.count = ZLIB_DECOMP_TEST_VECTORS
				}
			}
		}
	}
};

static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int rc;

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		return alg_test_cipher(alg_test_descs + i, driver, type, mask);
	}

	i = alg_find_test(alg);
	if (i < 0)
		goto notest;

	rc = alg_test_descs[i].test(alg_test_descs + i, driver,
				    type, mask);
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
}
EXPORT_SYMBOL_GPL(alg_test);
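/*
 * Usage sketch (illustrative only; in-tree this is normally invoked from the
 * crypto manager when an algorithm instance is registered): a driver that
 * provides "cbc(aes)" under the hypothetical driver name "cbc-aes-foo" would
 * be checked roughly as
 *
 *	err = alg_test("cbc-aes-foo", "cbc(aes)", type, mask);
 *
 * A return of 0 means the vectors passed or no test entry exists; with
 * fips_enabled, any self-test failure panics the kernel (see above).
 */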

int __init testmgr_init(void)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		xbuf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!xbuf[i])
			goto err_free_xbuf;
	}

	for (i = 0; i < XBUFSIZE; i++) {
		axbuf[i] = (void *)__get_free_page(GFP_KERNEL);
		if (!axbuf[i])
			goto err_free_axbuf;
	}

	return 0;

err_free_axbuf:
	for (i = 0; i < XBUFSIZE && axbuf[i]; i++)
		free_page((unsigned long)axbuf[i]);
err_free_xbuf:
	for (i = 0; i < XBUFSIZE && xbuf[i]; i++)
		free_page((unsigned long)xbuf[i]);

	return -ENOMEM;
}

void testmgr_exit(void)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)axbuf[i]);
	for (i = 0; i < XBUFSIZE; i++)
		free_page((unsigned long)xbuf[i]);
}