/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 * Copyright (c) 2019 Google LLC
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/once.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
#include <crypto/drbg.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/acompress.h>

#include "internal.h"

/* "notests=1" skips all crypto self-tests at algorithm registration time */
static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/* "noextratests=1" skips only the expensive randomized (fuzz) tests */
static bool noextratests;
module_param(noextratests, bool, 0644);
MODULE_PARM_DESC(noextratests, "disable expensive crypto self-tests");

/* How many random configs each fuzz test tries per algorithm */
static unsigned int fuzz_iterations = 100;
module_param(fuzz_iterations, uint, 0644);
MODULE_PARM_DESC(fuzz_iterations, "number of fuzz test iterations");
#endif

#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else

#include "testmgr.h"

/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1511
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0

/* Test vectors (and their count) for one AEAD algorithm */
struct aead_test_suite {
	const struct aead_testvec *vecs;
	unsigned int count;
};

/* Test vectors for one cipher (skcipher) algorithm */
struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};

/* Separate vector lists for the compress and decompress directions */
struct comp_test_suite {
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

/* Test vectors for one hash algorithm */
struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

/* Test vectors for one crypto PRNG */
struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};

/* Test vectors for one DRBG */
struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};

/* Test vectors for one asymmetric-cipher algorithm */
struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};

/* Test vectors for one key-agreement (KPP) algorithm */
struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};

/*
 * One entry in the table of algorithms testmgr knows how to test:
 * the algorithm name, the test function to run, whether the algorithm
 * is permitted in FIPS mode, and the vectors for its algorithm type.
 */
struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};

/* The cross-page offsets above, in array form for indexed access */
static const unsigned int IDX[8] = {
	IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };

/* Dump a buffer as hex to the kernel log, for test-failure diagnostics */
static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}

/*
 * Allocate XBUFSIZE page-sized (2^order pages each) test buffers.
 * On failure, frees any buffers already allocated and returns -ENOMEM.
 */
static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_pages((unsigned long)buf[i], order);

	return -ENOMEM;
}

/* Allocate XBUFSIZE single-page test buffers */
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	return __testmgr_alloc_buf(buf, 0);
}

/* Free buffers allocated by __testmgr_alloc_buf() with the same order */
static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_pages((unsigned long)buf[i], order);
}

/* Free buffers allocated by testmgr_alloc_buf() */
static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	__testmgr_free_buf(buf, 0);
}

#define TESTMGR_POISON_BYTE	0xfe
#define TESTMGR_POISON_LEN	16

/* Fill a region with the poison byte, to later detect unexpected writes */
static inline void testmgr_poison(void *addr, size_t len)
{
	memset(addr, TESTMGR_POISON_BYTE, len);
}

/* Is the memory region still fully poisoned? */
static inline bool testmgr_is_poison(const void *addr, size_t len)
{
	return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
}

/* flush type for hash algorithms */
enum flush_type {
	/* merge with update of previous buffer(s) */
	FLUSH_TYPE_NONE = 0,

	/* update with previous buffer(s) before doing this one */
	FLUSH_TYPE_FLUSH,

	/* likewise, but also export and re-import the intermediate state */
	FLUSH_TYPE_REIMPORT,
};

/* finalization function for hash algorithms */
enum finalization_type {
	FINALIZATION_TYPE_FINAL,	/* use final() */
	FINALIZATION_TYPE_FINUP,	/* use finup() */
	FINALIZATION_TYPE_DIGEST,	/* use digest() */
};

#define TEST_SG_TOTAL	10000

/**
 * struct test_sg_division - description of a scatterlist entry
 *
 * This struct describes one entry of a scatterlist being constructed to check a
 * crypto test vector.
 *
 * @proportion_of_total: length of this chunk relative to the total length,
 *			 given as a proportion out of TEST_SG_TOTAL so that it
 *			 scales to fit any test vector
 * @offset: byte offset into a 2-page buffer at which this chunk will start
 * @offset_relative_to_alignmask: if true, add the algorithm's alignmask to the
 *				  @offset
 * @flush_type: for hashes, whether an update() should be done now vs.
 *		continuing to accumulate data
 */
struct test_sg_division {
	unsigned int proportion_of_total;
	unsigned int offset;
	bool offset_relative_to_alignmask;
	enum flush_type flush_type;
};

/**
 * struct testvec_config - configuration for testing a crypto test vector
 *
 * This struct describes the data layout and other parameters with which each
 * crypto test vector can be tested.
 *
 * @name: name of this config, logged for debugging purposes if a test fails
 * @inplace: operate on the data in-place, if applicable for the algorithm type?
 * @req_flags: extra request_flags, e.g. CRYPTO_TFM_REQ_MAY_SLEEP
 * @src_divs: description of how to arrange the source scatterlist
 * @dst_divs: description of how to arrange the dst scatterlist, if applicable
 *	      for the algorithm type.  Defaults to @src_divs if unset.
 * @iv_offset: misalignment of the IV in the range [0..MAX_ALGAPI_ALIGNMASK+1],
 *	       where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
 * @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 *				     the @iv_offset
 * @finalization_type: what finalization function to use for hashes
 */
struct testvec_config {
	const char *name;
	bool inplace;
	u32 req_flags;
	struct test_sg_division src_divs[XBUFSIZE];
	struct test_sg_division dst_divs[XBUFSIZE];
	unsigned int iv_offset;
	bool iv_offset_relative_to_alignmask;
	enum finalization_type finalization_type;
};

/* Buffer size for generated random config names (see fuzz helpers below) */
#define TESTVEC_CONFIG_NAMELEN	192

/*
 * The following are the lists of testvec_configs to test for each algorithm
 * type when the basic crypto self-tests are enabled, i.e. when
 * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is unset.  They aim to provide good test
 * coverage, while keeping the test time much shorter than the full fuzz tests
 * so that the basic tests can be enabled in a wider range of circumstances.
 */

/* Configs for skciphers and aeads */
static const struct testvec_config default_cipher_testvec_configs[] = {
	{
		.name = "in-place",
		.inplace = true,
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "out-of-place",
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "unaligned buffer, offset=1",
		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
		.iv_offset = 1,
	}, {
		.name = "buffer aligned only to alignmask",
		.src_divs = {
			{
				.proportion_of_total = 10000,
				.offset = 1,
				.offset_relative_to_alignmask = true,
			},
		},
		.iv_offset = 1,
		.iv_offset_relative_to_alignmask = true,
	}, {
		.name = "two even aligned splits",
		.src_divs = {
			{ .proportion_of_total = 5000 },
			{ .proportion_of_total = 5000 },
		},
	}, {
		.name = "uneven misaligned splits, may sleep",
		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
		.src_divs = {
			{ .proportion_of_total = 1900, .offset = 33 },
			{ .proportion_of_total = 3300, .offset = 7 },
			{ .proportion_of_total = 4800, .offset = 18 },
		},
		.iv_offset = 3,
	}, {
		.name = "misaligned splits crossing pages, inplace",
		.inplace = true,
		.src_divs = {
			{
				.proportion_of_total = 7500,
				.offset = PAGE_SIZE - 32
			}, {
				.proportion_of_total = 2500,
				.offset = PAGE_SIZE - 7
			},
		},
	}
};

/*
 * Count the number of divisions actually used in @divs, i.e. up to and
 * including the one that brings the running total to TEST_SG_TOTAL.
 * Assumes @divs is valid (proportions sum exactly to TEST_SG_TOTAL).
 */
static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
{
	unsigned int remaining = TEST_SG_TOTAL;
	unsigned int ndivs = 0;

	do {
		remaining -= divs[ndivs++].proportion_of_total;
	} while (remaining);

	return ndivs;
}

/*
 * Check that the proportions in @divs are all nonzero, sum exactly to
 * TEST_SG_TOTAL, and that any trailing unused entries are all-zero.
 * Sets *@any_flushes_ret if any used division requests a flush.
 */
static bool valid_sg_divisions(const struct test_sg_division *divs,
			       unsigned int count, bool *any_flushes_ret)
{
	unsigned int total = 0;
	unsigned int i;

	for (i = 0; i < count && total != TEST_SG_TOTAL; i++) {
		if (divs[i].proportion_of_total <= 0 ||
		    divs[i].proportion_of_total > TEST_SG_TOTAL - total)
			return false;
		total += divs[i].proportion_of_total;
		if (divs[i].flush_type != FLUSH_TYPE_NONE)
			*any_flushes_ret = true;
	}
	return total == TEST_SG_TOTAL &&
		memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
}

/*
 * Check whether the given testvec_config is valid.  This isn't strictly needed
 * since every testvec_config should be valid, but check anyway so that people
 * don't unknowingly add broken configs that don't do what they wanted.
 */
static bool valid_testvec_config(const struct testvec_config *cfg)
{
	bool any_flushes = false;

	if (cfg->name == NULL)
		return false;

	if (!valid_sg_divisions(cfg->src_divs, ARRAY_SIZE(cfg->src_divs),
				&any_flushes))
		return false;

	if (cfg->dst_divs[0].proportion_of_total) {
		if (!valid_sg_divisions(cfg->dst_divs,
					ARRAY_SIZE(cfg->dst_divs),
					&any_flushes))
			return false;
	} else {
		/* defaults to dst_divs=src_divs; unused dst_divs must be 0 */
		if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
			return false;
	}

	if (cfg->iv_offset +
	    (cfg->iv_offset_relative_to_alignmask ? MAX_ALGAPI_ALIGNMASK : 0) >
	    MAX_ALGAPI_ALIGNMASK + 1)
		return false;

	/* digest() is one-shot, so it can't be combined with mid-flushes */
	if (any_flushes && cfg->finalization_type == FINALIZATION_TYPE_DIGEST)
		return false;

	return true;
}

/*
 * A scatterlist under test: the backing 2-page buffers, the live sgl, a
 * saved copy for detecting corruption, and the pointer/nents actually
 * passed to the crypto operation.
 */
struct test_sglist {
	char *bufs[XBUFSIZE];
	struct scatterlist sgl[XBUFSIZE];
	struct scatterlist sgl_saved[XBUFSIZE];
	struct scatterlist *sgl_ptr;
	unsigned int nents;
};

/* Allocate the 2-page backing buffers of a test_sglist */
static int init_test_sglist(struct test_sglist *tsgl)
{
	return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);
}

/* Free the backing buffers of a test_sglist */
static void destroy_test_sglist(struct test_sglist *tsgl)
{
	return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);
}

/**
 * build_test_sglist() - build a scatterlist for a crypto test
 *
 * @tsgl: the scatterlist to build.  @tsgl->bufs[] contains an array of 2-page
 *	  buffers which the scatterlist @tsgl->sgl[] will be made to point
 *	  into.
 * @divs: the layout specification on which the scatterlist will be based
 * @alignmask: the algorithm's alignmask
 * @total_len: the total length of the scatterlist to build in bytes
 * @data: if non-NULL, the buffers will be filled with this data until it ends.
 *	  Otherwise the buffers will be poisoned.  In both cases, some bytes
 *	  past the end of each buffer will be poisoned to help detect overruns.
 * @out_divs: if non-NULL, the test_sg_division to which each scatterlist entry
 *	      corresponds will be returned here.  This will match @divs except
 *	      that divisions resolving to a length of 0 are omitted as they are
 *	      not included in the scatterlist.
 *
 * Return: 0 or a -errno value
 */
static int build_test_sglist(struct test_sglist *tsgl,
			     const struct test_sg_division *divs,
			     const unsigned int alignmask,
			     const unsigned int total_len,
			     struct iov_iter *data,
			     const struct test_sg_division *out_divs[XBUFSIZE])
{
	struct {
		const struct test_sg_division *div;
		size_t length;
	} partitions[XBUFSIZE];
	const unsigned int ndivs = count_test_sg_divisions(divs);
	unsigned int len_remaining = total_len;
	unsigned int i;

	BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
	if (WARN_ON(ndivs > ARRAY_SIZE(partitions)))
		return -EINVAL;

	/* Calculate the (div, length) pairs */
	tsgl->nents = 0;
	for (i = 0; i < ndivs; i++) {
		/* Round each proportion to the nearest byte of total_len */
		unsigned int len_this_sg =
			min(len_remaining,
			    (total_len * divs[i].proportion_of_total +
			     TEST_SG_TOTAL / 2) / TEST_SG_TOTAL);

		if (len_this_sg != 0) {
			partitions[tsgl->nents].div = &divs[i];
			partitions[tsgl->nents].length = len_this_sg;
			tsgl->nents++;
			len_remaining -= len_this_sg;
		}
	}
	if (tsgl->nents == 0) {
		/* Degenerate case: ensure at least one (empty) entry */
		partitions[tsgl->nents].div = &divs[0];
		partitions[tsgl->nents].length = 0;
		tsgl->nents++;
	}
	/* Any rounding leftover is absorbed by the last entry */
	partitions[tsgl->nents - 1].length += len_remaining;

	/* Set up the sgl entries and fill the data or poison */
	sg_init_table(tsgl->sgl, tsgl->nents);
	for (i = 0; i < tsgl->nents; i++) {
		unsigned int offset = partitions[i].div->offset;
		void *addr;

		if (partitions[i].div->offset_relative_to_alignmask)
			offset += alignmask;

		/*
		 * Halve the offset until the chunk plus its trailing poison
		 * fits within the 2-page buffer.  offset is unsigned, so the
		 * "<= 0" guard here really means "== 0" (no room at all).
		 */
		while (offset + partitions[i].length + TESTMGR_POISON_LEN >
		       2 * PAGE_SIZE) {
			if (WARN_ON(offset <= 0))
				return -EINVAL;
			offset /= 2;
		}

		addr = &tsgl->bufs[i][offset];
		sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);

		if (out_divs)
			out_divs[i] = partitions[i].div;

		if (data) {
			size_t copy_len, copied;

			copy_len = min(partitions[i].length, data->count);
			copied = copy_from_iter(addr, copy_len, data);
			if (WARN_ON(copied != copy_len))
				return -EINVAL;
			testmgr_poison(addr + copy_len, partitions[i].length +
				       TESTMGR_POISON_LEN - copy_len);
		} else {
			testmgr_poison(addr, partitions[i].length +
				       TESTMGR_POISON_LEN);
		}
	}

	sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
	tsgl->sgl_ptr = tsgl->sgl;
	/* Keep a pristine copy so is_test_sglist_corrupted() can compare */
	memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
	return 0;
}

/*
 * Verify that a scatterlist crypto operation produced the correct output.
 *
 * @tsgl: scatterlist containing the actual output
 * @expected_output: buffer containing the expected output
 * @len_to_check: length of @expected_output in bytes
 * @unchecked_prefix_len: number of ignored bytes in @tsgl prior to real result
 * @check_poison: verify that the poison bytes after each chunk are intact?
 *
 * Return: 0 if correct, -EINVAL if incorrect, -EOVERFLOW if buffer overrun.
 */
static int verify_correct_output(const struct test_sglist *tsgl,
				 const char *expected_output,
				 unsigned int len_to_check,
				 unsigned int unchecked_prefix_len,
				 bool check_poison)
{
	unsigned int i;

	for (i = 0; i < tsgl->nents; i++) {
		struct scatterlist *sg = &tsgl->sgl_ptr[i];
		unsigned int len = sg->length;
		unsigned int offset = sg->offset;
		const char *actual_output;

		if (unchecked_prefix_len) {
			/* Skip whole entries (or a leading part of one) */
			if (unchecked_prefix_len >= len) {
				unchecked_prefix_len -= len;
				continue;
			}
			offset += unchecked_prefix_len;
			len -= unchecked_prefix_len;
			unchecked_prefix_len = 0;
		}
		len = min(len, len_to_check);
		actual_output = page_address(sg_page(sg)) + offset;
		if (memcmp(expected_output, actual_output, len) != 0)
			return -EINVAL;
		if (check_poison &&
		    !testmgr_is_poison(actual_output + len, TESTMGR_POISON_LEN))
			return -EOVERFLOW;
		len_to_check -= len;
		expected_output += len;
	}
	if (WARN_ON(len_to_check != 0))
		return -EINVAL;
	return 0;
}

/*
 * Did the crypto operation modify the scatterlist entries themselves
 * (page pointer, offset, or length), as opposed to just the data?
 */
static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)
{
	unsigned int i;

	for (i = 0; i < tsgl->nents; i++) {
		if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
			return true;
		if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
			return true;
		if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)
			return true;
	}
	return false;
}

/* Paired source and destination scatterlists for skcipher/AEAD tests */
struct cipher_test_sglists {
	struct test_sglist src;
	struct test_sglist dst;
};

/*
 * Allocate a cipher_test_sglists and the backing buffers of both lists.
 * Returns NULL on allocation failure; otherwise the caller must free it
 * with free_cipher_test_sglists().
 */
static struct cipher_test_sglists *alloc_cipher_test_sglists(void)
{
	struct cipher_test_sglists *tsgls;

	tsgls = kmalloc(sizeof(*tsgls), GFP_KERNEL);
	if (!tsgls)
		return NULL;

	if (init_test_sglist(&tsgls->src) != 0)
		goto fail_kfree;
	if (init_test_sglist(&tsgls->dst) != 0)
		goto fail_destroy_src;

	return tsgls;

fail_destroy_src:
	destroy_test_sglist(&tsgls->src);
fail_kfree:
	kfree(tsgls);
	return NULL;
}

/* Free a cipher_test_sglists allocated by alloc_cipher_test_sglists() */
static void free_cipher_test_sglists(struct cipher_test_sglists *tsgls)
{
	if (tsgls) {
		destroy_test_sglist(&tsgls->src);
		destroy_test_sglist(&tsgls->dst);
		kfree(tsgls);
	}
}

/* Build the src and dst scatterlists for an skcipher or AEAD test */
static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
				     const struct testvec_config *cfg,
				     unsigned int alignmask,
				     unsigned int src_total_len,
				     unsigned int dst_total_len,
				     const struct kvec *inputs,
				     unsigned int nr_inputs)
{
	struct iov_iter input;
	int err;

	iov_iter_kvec(&input, WRITE, inputs, nr_inputs, src_total_len);
	err = build_test_sglist(&tsgls->src, cfg->src_divs, alignmask,
				cfg->inplace ?
					max(dst_total_len, src_total_len) :
					src_total_len,
				&input, NULL);
	if (err)
		return err;

	if (cfg->inplace) {
		/* In-place: dst aliases src rather than being built anew */
		tsgls->dst.sgl_ptr = tsgls->src.sgl;
		tsgls->dst.nents = tsgls->src.nents;
		return 0;
	}
	return build_test_sglist(&tsgls->dst,
				 cfg->dst_divs[0].proportion_of_total ?
					cfg->dst_divs : cfg->src_divs,
				 alignmask, dst_total_len, NULL, NULL);
}

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/*
 * Randomly generate scatterlist divisions for fuzz testing, appending a
 * human-readable description of them to @p (bounded by @end) and
 * returning the new end of the description string.
 */
static char *generate_random_sgl_divisions(struct test_sg_division *divs,
					   size_t max_divs, char *p, char *end,
					   bool gen_flushes)
{
	struct test_sg_division *div = divs;
	unsigned int remaining = TEST_SG_TOTAL;

	do {
		unsigned int this_len;

		/* The last slot must absorb everything that remains */
		if (div == &divs[max_divs - 1] || prandom_u32() % 2 == 0)
			this_len = remaining;
		else
			this_len = 1 + (prandom_u32() % remaining);
		div->proportion_of_total = this_len;

		if (prandom_u32() % 4 == 0)
			div->offset = (PAGE_SIZE - 128) + (prandom_u32() % 128);
		else if (prandom_u32() % 2 == 0)
			div->offset = prandom_u32() % 32;
		else
			div->offset = prandom_u32() % PAGE_SIZE;
		if (prandom_u32() % 8 == 0)
			div->offset_relative_to_alignmask = true;

		div->flush_type = FLUSH_TYPE_NONE;
		if (gen_flushes) {
			switch (prandom_u32() % 4) {
			case 0:
				div->flush_type = FLUSH_TYPE_REIMPORT;
				break;
			case 1:
				div->flush_type = FLUSH_TYPE_FLUSH;
				break;
			}
		}

		BUILD_BUG_ON(TEST_SG_TOTAL != 10000); /* for "%u.%u%%" */
		p += scnprintf(p, end - p, "%s%u.%u%%@%s+%u%s",
			       div->flush_type == FLUSH_TYPE_NONE ? "" :
			       div->flush_type == FLUSH_TYPE_FLUSH ?
				"<flush> " : "<reimport> ",
			       this_len / 100, this_len % 100,
			       div->offset_relative_to_alignmask ?
				"alignmask" : "",
			       div->offset, this_len == remaining ? "" : ", ");
		remaining -= this_len;
		div++;
	} while (remaining);

	return p;
}

/* Generate a random testvec_config for fuzz testing */
static void generate_random_testvec_config(struct testvec_config *cfg,
					   char *name, size_t max_namelen)
{
	char *p = name;
	char * const end = name + max_namelen;

	memset(cfg, 0, sizeof(*cfg));

	/* The config's name describes the random choices made below */
	cfg->name = name;

	p += scnprintf(p, end - p, "random:");

	if (prandom_u32() % 2 == 0) {
		cfg->inplace = true;
		p += scnprintf(p, end - p, " inplace");
	}

	if (prandom_u32() % 2 == 0) {
		cfg->req_flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
		p += scnprintf(p, end - p, " may_sleep");
	}

	switch (prandom_u32() % 4) {
	case 0:
		cfg->finalization_type = FINALIZATION_TYPE_FINAL;
		p += scnprintf(p, end - p, " use_final");
		break;
	case 1:
		cfg->finalization_type = FINALIZATION_TYPE_FINUP;
		p += scnprintf(p, end - p, " use_finup");
		break;
	default:
		cfg->finalization_type = FINALIZATION_TYPE_DIGEST;
		p += scnprintf(p, end - p, " use_digest");
		break;
	}

	p += scnprintf(p, end - p, " src_divs=[");
	p = generate_random_sgl_divisions(cfg->src_divs,
					  ARRAY_SIZE(cfg->src_divs), p, end,
					  (cfg->finalization_type !=
					   FINALIZATION_TYPE_DIGEST));
	p += scnprintf(p, end - p, "]");

	if (!cfg->inplace && prandom_u32() % 2 == 0) {
		p += scnprintf(p, end - p, " dst_divs=[");
		p = generate_random_sgl_divisions(cfg->dst_divs,
						  ARRAY_SIZE(cfg->dst_divs),
						  p, end, false);
		p += scnprintf(p, end - p, "]");
	}

	if (prandom_u32() % 2 == 0) {
		cfg->iv_offset = 1 + (prandom_u32() % MAX_ALGAPI_ALIGNMASK);
		p += scnprintf(p, end - p, " iv_offset=%u", cfg->iv_offset);
	}

	WARN_ON_ONCE(!valid_testvec_config(cfg));
}
#endif /* CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */

/*
 * Check that every byte of @result equals @c; used to verify that an
 * operation did not write to req->result when it should not have.
 * Returns 0 if all bytes match, -EINVAL otherwise.
 */
static int ahash_guard_result(char *result, char c, int size)
{
	int i;

	for (i = 0;
i < size; i++) { 790 if (result[i] != c) 791 return -EINVAL; 792 } 793 794 return 0; 795 } 796 797 static int ahash_partial_update(struct ahash_request **preq, 798 struct crypto_ahash *tfm, const struct hash_testvec *template, 799 void *hash_buff, int k, int temp, struct scatterlist *sg, 800 const char *algo, char *result, struct crypto_wait *wait) 801 { 802 char *state; 803 struct ahash_request *req; 804 int statesize, ret = -EINVAL; 805 static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 }; 806 int digestsize = crypto_ahash_digestsize(tfm); 807 808 req = *preq; 809 statesize = crypto_ahash_statesize( 810 crypto_ahash_reqtfm(req)); 811 state = kmalloc(statesize + sizeof(guard), GFP_KERNEL); 812 if (!state) { 813 pr_err("alg: hash: Failed to alloc state for %s\n", algo); 814 goto out_nostate; 815 } 816 memcpy(state + statesize, guard, sizeof(guard)); 817 memset(result, 1, digestsize); 818 ret = crypto_ahash_export(req, state); 819 WARN_ON(memcmp(state + statesize, guard, sizeof(guard))); 820 if (ret) { 821 pr_err("alg: hash: Failed to export() for %s\n", algo); 822 goto out; 823 } 824 ret = ahash_guard_result(result, 1, digestsize); 825 if (ret) { 826 pr_err("alg: hash: Failed, export used req->result for %s\n", 827 algo); 828 goto out; 829 } 830 ahash_request_free(req); 831 req = ahash_request_alloc(tfm, GFP_KERNEL); 832 if (!req) { 833 pr_err("alg: hash: Failed to alloc request for %s\n", algo); 834 goto out_noreq; 835 } 836 ahash_request_set_callback(req, 837 CRYPTO_TFM_REQ_MAY_BACKLOG, 838 crypto_req_done, wait); 839 840 memcpy(hash_buff, template->plaintext + temp, 841 template->tap[k]); 842 sg_init_one(&sg[0], hash_buff, template->tap[k]); 843 ahash_request_set_crypt(req, sg, result, template->tap[k]); 844 ret = crypto_ahash_import(req, state); 845 if (ret) { 846 pr_err("alg: hash: Failed to import() for %s\n", algo); 847 goto out; 848 } 849 ret = ahash_guard_result(result, 1, digestsize); 850 if (ret) { 851 pr_err("alg: hash: Failed, import used 
req->result for %s\n", 852 algo); 853 goto out; 854 } 855 ret = crypto_wait_req(crypto_ahash_update(req), wait); 856 if (ret) 857 goto out; 858 *preq = req; 859 ret = 0; 860 goto out_noreq; 861 out: 862 ahash_request_free(req); 863 out_noreq: 864 kfree(state); 865 out_nostate: 866 return ret; 867 } 868 869 enum hash_test { 870 HASH_TEST_DIGEST, 871 HASH_TEST_FINAL, 872 HASH_TEST_FINUP 873 }; 874 875 static int __test_hash(struct crypto_ahash *tfm, 876 const struct hash_testvec *template, unsigned int tcount, 877 enum hash_test test_type, const int align_offset) 878 { 879 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm)); 880 size_t digest_size = crypto_ahash_digestsize(tfm); 881 unsigned int i, j, k, temp; 882 struct scatterlist sg[8]; 883 char *result; 884 char *key; 885 struct ahash_request *req; 886 struct crypto_wait wait; 887 void *hash_buff; 888 char *xbuf[XBUFSIZE]; 889 int ret = -ENOMEM; 890 891 result = kmalloc(digest_size, GFP_KERNEL); 892 if (!result) 893 return ret; 894 key = kmalloc(MAX_KEYLEN, GFP_KERNEL); 895 if (!key) 896 goto out_nobuf; 897 if (testmgr_alloc_buf(xbuf)) 898 goto out_nobuf; 899 900 crypto_init_wait(&wait); 901 902 req = ahash_request_alloc(tfm, GFP_KERNEL); 903 if (!req) { 904 printk(KERN_ERR "alg: hash: Failed to allocate request for " 905 "%s\n", algo); 906 goto out_noreq; 907 } 908 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 909 crypto_req_done, &wait); 910 911 j = 0; 912 for (i = 0; i < tcount; i++) { 913 if (template[i].np) 914 continue; 915 916 ret = -EINVAL; 917 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE)) 918 goto out; 919 920 j++; 921 memset(result, 0, digest_size); 922 923 hash_buff = xbuf[0]; 924 hash_buff += align_offset; 925 926 memcpy(hash_buff, template[i].plaintext, template[i].psize); 927 sg_init_one(&sg[0], hash_buff, template[i].psize); 928 929 if (template[i].ksize) { 930 crypto_ahash_clear_flags(tfm, ~0); 931 if (template[i].ksize > MAX_KEYLEN) { 932 pr_err("alg: 
hash: setkey failed on test %d for %s: key size %d > %d\n", 933 j, algo, template[i].ksize, MAX_KEYLEN); 934 ret = -EINVAL; 935 goto out; 936 } 937 memcpy(key, template[i].key, template[i].ksize); 938 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 939 if (ret) { 940 printk(KERN_ERR "alg: hash: setkey failed on " 941 "test %d for %s: ret=%d\n", j, algo, 942 -ret); 943 goto out; 944 } 945 } 946 947 ahash_request_set_crypt(req, sg, result, template[i].psize); 948 switch (test_type) { 949 case HASH_TEST_DIGEST: 950 ret = crypto_wait_req(crypto_ahash_digest(req), &wait); 951 if (ret) { 952 pr_err("alg: hash: digest failed on test %d " 953 "for %s: ret=%d\n", j, algo, -ret); 954 goto out; 955 } 956 break; 957 958 case HASH_TEST_FINAL: 959 memset(result, 1, digest_size); 960 ret = crypto_wait_req(crypto_ahash_init(req), &wait); 961 if (ret) { 962 pr_err("alg: hash: init failed on test %d " 963 "for %s: ret=%d\n", j, algo, -ret); 964 goto out; 965 } 966 ret = ahash_guard_result(result, 1, digest_size); 967 if (ret) { 968 pr_err("alg: hash: init failed on test %d " 969 "for %s: used req->result\n", j, algo); 970 goto out; 971 } 972 ret = crypto_wait_req(crypto_ahash_update(req), &wait); 973 if (ret) { 974 pr_err("alg: hash: update failed on test %d " 975 "for %s: ret=%d\n", j, algo, -ret); 976 goto out; 977 } 978 ret = ahash_guard_result(result, 1, digest_size); 979 if (ret) { 980 pr_err("alg: hash: update failed on test %d " 981 "for %s: used req->result\n", j, algo); 982 goto out; 983 } 984 ret = crypto_wait_req(crypto_ahash_final(req), &wait); 985 if (ret) { 986 pr_err("alg: hash: final failed on test %d " 987 "for %s: ret=%d\n", j, algo, -ret); 988 goto out; 989 } 990 break; 991 992 case HASH_TEST_FINUP: 993 memset(result, 1, digest_size); 994 ret = crypto_wait_req(crypto_ahash_init(req), &wait); 995 if (ret) { 996 pr_err("alg: hash: init failed on test %d " 997 "for %s: ret=%d\n", j, algo, -ret); 998 goto out; 999 } 1000 ret = ahash_guard_result(result, 1, 
digest_size); 1001 if (ret) { 1002 pr_err("alg: hash: init failed on test %d " 1003 "for %s: used req->result\n", j, algo); 1004 goto out; 1005 } 1006 ret = crypto_wait_req(crypto_ahash_finup(req), &wait); 1007 if (ret) { 1008 pr_err("alg: hash: final failed on test %d " 1009 "for %s: ret=%d\n", j, algo, -ret); 1010 goto out; 1011 } 1012 break; 1013 } 1014 1015 if (memcmp(result, template[i].digest, 1016 crypto_ahash_digestsize(tfm))) { 1017 printk(KERN_ERR "alg: hash: Test %d failed for %s\n", 1018 j, algo); 1019 hexdump(result, crypto_ahash_digestsize(tfm)); 1020 ret = -EINVAL; 1021 goto out; 1022 } 1023 } 1024 1025 if (test_type) 1026 goto out; 1027 1028 j = 0; 1029 for (i = 0; i < tcount; i++) { 1030 /* alignment tests are only done with continuous buffers */ 1031 if (align_offset != 0) 1032 break; 1033 1034 if (!template[i].np) 1035 continue; 1036 1037 j++; 1038 memset(result, 0, digest_size); 1039 1040 temp = 0; 1041 sg_init_table(sg, template[i].np); 1042 ret = -EINVAL; 1043 for (k = 0; k < template[i].np; k++) { 1044 if (WARN_ON(offset_in_page(IDX[k]) + 1045 template[i].tap[k] > PAGE_SIZE)) 1046 goto out; 1047 sg_set_buf(&sg[k], 1048 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] + 1049 offset_in_page(IDX[k]), 1050 template[i].plaintext + temp, 1051 template[i].tap[k]), 1052 template[i].tap[k]); 1053 temp += template[i].tap[k]; 1054 } 1055 1056 if (template[i].ksize) { 1057 if (template[i].ksize > MAX_KEYLEN) { 1058 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 1059 j, algo, template[i].ksize, MAX_KEYLEN); 1060 ret = -EINVAL; 1061 goto out; 1062 } 1063 crypto_ahash_clear_flags(tfm, ~0); 1064 memcpy(key, template[i].key, template[i].ksize); 1065 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 1066 1067 if (ret) { 1068 printk(KERN_ERR "alg: hash: setkey " 1069 "failed on chunking test %d " 1070 "for %s: ret=%d\n", j, algo, -ret); 1071 goto out; 1072 } 1073 } 1074 1075 ahash_request_set_crypt(req, sg, result, template[i].psize); 1076 ret 
= crypto_wait_req(crypto_ahash_digest(req), &wait); 1077 if (ret) { 1078 pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n", 1079 j, algo, -ret); 1080 goto out; 1081 } 1082 1083 if (memcmp(result, template[i].digest, 1084 crypto_ahash_digestsize(tfm))) { 1085 printk(KERN_ERR "alg: hash: Chunking test %d " 1086 "failed for %s\n", j, algo); 1087 hexdump(result, crypto_ahash_digestsize(tfm)); 1088 ret = -EINVAL; 1089 goto out; 1090 } 1091 } 1092 1093 /* partial update exercise */ 1094 j = 0; 1095 for (i = 0; i < tcount; i++) { 1096 /* alignment tests are only done with continuous buffers */ 1097 if (align_offset != 0) 1098 break; 1099 1100 if (template[i].np < 2) 1101 continue; 1102 1103 j++; 1104 memset(result, 0, digest_size); 1105 1106 ret = -EINVAL; 1107 hash_buff = xbuf[0]; 1108 memcpy(hash_buff, template[i].plaintext, 1109 template[i].tap[0]); 1110 sg_init_one(&sg[0], hash_buff, template[i].tap[0]); 1111 1112 if (template[i].ksize) { 1113 crypto_ahash_clear_flags(tfm, ~0); 1114 if (template[i].ksize > MAX_KEYLEN) { 1115 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n", 1116 j, algo, template[i].ksize, MAX_KEYLEN); 1117 ret = -EINVAL; 1118 goto out; 1119 } 1120 memcpy(key, template[i].key, template[i].ksize); 1121 ret = crypto_ahash_setkey(tfm, key, template[i].ksize); 1122 if (ret) { 1123 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n", 1124 j, algo, -ret); 1125 goto out; 1126 } 1127 } 1128 1129 ahash_request_set_crypt(req, sg, result, template[i].tap[0]); 1130 ret = crypto_wait_req(crypto_ahash_init(req), &wait); 1131 if (ret) { 1132 pr_err("alg: hash: init failed on test %d for %s: ret=%d\n", 1133 j, algo, -ret); 1134 goto out; 1135 } 1136 ret = crypto_wait_req(crypto_ahash_update(req), &wait); 1137 if (ret) { 1138 pr_err("alg: hash: update failed on test %d for %s: ret=%d\n", 1139 j, algo, -ret); 1140 goto out; 1141 } 1142 1143 temp = template[i].tap[0]; 1144 for (k = 1; k < template[i].np; k++) { 1145 
			/* Feed the next tap-sized piece via init/update/final. */
			ret = ahash_partial_update(&req, tfm, &template[i],
				hash_buff, k, temp, &sg[0], algo, result,
				&wait);
			if (ret) {
				pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
				       j, algo, -ret);
				goto out_noreq;
			}
			temp += template[i].tap[k];
		}
		/* Finalize the incrementally built hash and check the digest. */
		ret = crypto_wait_req(crypto_ahash_final(req), &wait);
		if (ret) {
			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
			       j, algo, -ret);
			goto out;
		}
		if (memcmp(result, template[i].digest,
			   crypto_ahash_digestsize(tfm))) {
			pr_err("alg: hash: Partial Test %d failed for %s\n",
			       j, algo);
			hexdump(result, crypto_ahash_digestsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	ahash_request_free(req);
out_noreq:
	testmgr_free_buf(xbuf);
out_nobuf:
	kfree(key);
	kfree(result);
	return ret;
}

/*
 * Run __test_hash() over the test vectors at several buffer alignments:
 * natural alignment, a one-byte offset, and (if the transform declares a
 * non-zero alignmask) one byte past the mask boundary, to verify that the
 * implementation honours its advertised alignment requirements.
 */
static int test_hash(struct crypto_ahash *tfm,
		     const struct hash_testvec *template,
		     unsigned int tcount, enum hash_test test_type)
{
	unsigned int alignmask;
	int ret;

	ret = __test_hash(tfm, template, tcount, test_type, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_hash(tfm, template, tcount, test_type, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set. */
		ret = __test_hash(tfm, template, tcount, test_type,
				  alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}

/*
 * Exercise an AEAD transform against the given test vectors.
 * @enc:          ENCRYPT or DECRYPT direction.
 * @diff_dst:     if true, use a destination scatterlist separate from the
 *                source.
 * @align_offset: byte offset applied to the data buffers to exercise
 *                (mis)aligned access.
 * Returns 0 on success or a negative errno on the first failing vector.
 */
static int __test_aead(struct crypto_aead *tfm, int enc,
		       const struct aead_testvec *template, unsigned int tcount,
		       const bool diff_dst, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e, *d;
	struct crypto_wait wait;
	unsigned int authsize, iv_len;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_noxbuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
	sg = kmalloc(array3_size(sizeof(*sg), 8, (diff_dst ?
						  4 : 2)),
		    GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	/*
	 * sgout aliases the second half of the allocation; it is only
	 * dereferenced when diff_dst (the allocation then holds 32 entries).
	 */
	sgout = &sg[16];

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	crypto_init_wait(&wait);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);

	iv_len = crypto_aead_ivsize(tfm);

	/* Pass 1: vectors laid out in a single contiguous buffer (np == 0). */
	for (i = 0, j = 0; i < tcount; i++) {
		const char *input, *expected_output;
		unsigned int inlen, outlen;
		char *inbuf, *outbuf, *assocbuf;

		if (template[i].np)
			continue;
		if (enc) {
			if (template[i].novrfy)
				continue;
			input = template[i].ptext;
			inlen = template[i].plen;
			expected_output = template[i].ctext;
			outlen = template[i].clen;
		} else {
			input = template[i].ctext;
			inlen = template[i].clen;
			expected_output = template[i].ptext;
			outlen = template[i].plen;
		}

		j++;

		/* some templates have no input data but they will
		 * touch input
		 */
		inbuf = xbuf[0] + align_offset;
		assocbuf = axbuf[0];

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].clen > PAGE_SIZE ||
			    template[i].alen > PAGE_SIZE))
			goto out;

		memcpy(inbuf, input, inlen);
		memcpy(assocbuf, template[i].assoc, template[i].alen);
		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, iv_len);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm,
					      CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen,
			       MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		/* template[i].fail set means setkey is expected to fail. */
		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* Auth tag length is the ciphertext/plaintext difference. */
		authsize = template[i].clen - template[i].plen;
		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		/* sg[0] = assoc data (may be empty), sg[k] = message. */
		k = !!template[i].alen;
		sg_init_table(sg, k + 1);
		sg_set_buf(&sg[0], assocbuf, template[i].alen);
		sg_set_buf(&sg[k], inbuf, template[i].clen);
		outbuf = inbuf;

		if (diff_dst) {
			sg_init_table(sgout, k + 1);
			sg_set_buf(&sgout[0], assocbuf, template[i].alen);

			outbuf = xoutbuf[0] + align_offset;
			sg_set_buf(&sgout[k], outbuf, template[i].clen);
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg, inlen,
				       iv);

		aead_request_set_ad(req, template[i].alen);

		ret = crypto_wait_req(enc ?
				      crypto_aead_encrypt(req)
				      : crypto_aead_decrypt(req), &wait);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		if (memcmp(outbuf, expected_output, outlen)) {
			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
			       d, j, e, algo);
			hexdump(outbuf, outlen);
			ret = -EINVAL;
			goto out;
		}
	}

	/*
	 * Pass 2: "chunked" vectors (np != 0) split across multiple
	 * scatterlist segments at the cross-page IDX[] offsets.
	 */
	for (i = 0, j = 0; i < tcount; i++) {
		const char *input, *expected_output;
		unsigned int inlen, outlen;

		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		if (enc) {
			if (template[i].novrfy)
				continue;
			input = template[i].ptext;
			inlen = template[i].plen;
			expected_output = template[i].ctext;
			outlen = template[i].clen;
		} else {
			input = template[i].ctext;
			inlen = template[i].clen;
			expected_output = template[i].ptext;
			outlen = template[i].plen;
		}

		j++;

		/*
		 * NOTE(review): pass 1 clears only iv_len bytes; here the
		 * whole MAX_IVLEN buffer is cleared.  Both are safe since iv
		 * was allocated with MAX_IVLEN bytes.
		 */
		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, MAX_IVLEN);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm,
					      CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen, MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = template[i].clen - template[i].plen;

		ret = -EINVAL;
		sg_init_table(sg, template[i].anp + template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].anp + template[i].np);

		ret = -EINVAL;
		/* First the assoc-data chunks (anp pieces, atap[] sizes)... */
		for (k = 0, temp = 0; k < template[i].anp; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].atap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].assoc + temp,
					  template[i].atap[k]),
				   template[i].atap[k]);
			if (diff_dst)
				sg_set_buf(&sgout[k],
					   axbuf[IDX[k] >> PAGE_SHIFT] +
					   offset_in_page(IDX[k]),
					   template[i].atap[k]);
			temp += template[i].atap[k];
		}

		/* ...then the message chunks (np pieces, tap[] sizes). */
		for (k = 0, temp = 0; k < template[i].np; k++) {
			n = template[i].tap[k];
			/* On decrypt the last chunk also carries the tag. */
			if (k == template[i].np - 1 && !enc)
				n += authsize;

			if (WARN_ON(offset_in_page(IDX[k]) + n > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
			memcpy(q, input + temp, n);
			sg_set_buf(&sg[template[i].anp + k], q, n);

			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memset(q, 0, n);

				sg_set_buf(&sgout[template[i].anp + k], q, n);
			}

			if (k == template[i].np - 1 && enc)
				n += authsize;
			/* Zero sentinel byte to detect writes past the end. */
			if (offset_in_page(q) + n < PAGE_SIZE)
				q[n] = 0;

			temp += n;
		}

		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		/* On encrypt, grow the last segment to hold the auth tag. */
		if (enc) {
			if (WARN_ON(sg[template[i].anp + k - 1].offset +
				    sg[template[i].anp + k - 1].length +
				    authsize > PAGE_SIZE)) {
				ret = -EINVAL;
				goto out;
			}

			if (diff_dst)
				sgout[template[i].anp + k - 1].length +=
					authsize;
			sg[template[i].anp + k - 1].length += authsize;
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       inlen, iv);

		aead_request_set_ad(req, template[i].alen);

		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
				      : crypto_aead_decrypt(req), &wait);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		/* Compare each output chunk and probe for buffer overruns. */
		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			n = template[i].tap[k];
			if (k == template[i].np - 1 && enc)
				n += authsize;

			if (memcmp(q, expected_output + temp, n)) {
				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, n);
				goto out;
			}

			/* n != 0 below means trailing garbage was found. */
			q += n;
			if (k == template[i].np - 1 && !enc) {
				if (!diff_dst && memcmp(q, input + temp + n,
							authsize))
					n = authsize;
				else
					n = 0;
			} else {
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
			}
			if (n) {
				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}

			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	kfree(sg);
out_nosg:
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(key);
	kfree(iv);
	return ret;
}

/*
 * Drive __test_aead() through the interesting src/dst and alignment
 * combinations: in-place, out-of-place, one-byte misaligned, and (if the
 * transform declares an alignmask) one byte past the mask boundary.
 */
static int test_aead(struct crypto_aead *tfm, int enc,
		     const struct aead_testvec *template, unsigned int tcount)
{
	unsigned int alignmask;
	int ret;

	/* test 'dst == src' case */
	ret = __test_aead(tfm, enc, template, tcount, false, 0);
	if (ret)
		return ret;

	/* test 'dst != src' case */
	ret = __test_aead(tfm, enc, template, tcount, true, 0);
	if (ret)
		return ret;

	/* test unaligned buffers, check with one byte offset */
	ret = __test_aead(tfm, enc, template, tcount, true, 1);
	if (ret)
		return ret;

	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
	if (alignmask) {
		/* Check if alignment mask for tfm is correctly set.
 */
		ret = __test_aead(tfm, enc, template, tcount, true,
				  alignmask + 1);
		if (ret)
			return ret;
	}

	return 0;
}

/*
 * Test a single-block cipher transform against the given vectors, applying
 * it block-by-block in place.  @enc selects ENCRYPT or DECRYPT.
 * Returns 0 on success or a negative errno on the first failure.
 */
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       const struct cipher_testvec *template,
		       unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	const char *input, *result;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {

		/* Some vectors are not allowed when running in FIPS mode. */
		if (fips_enabled && template[i].fips_skip)
			continue;

		input = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, input, template[i].len);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		/* template[i].fail set means setkey is expected to fail. */
		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (template[i].fail == !ret) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* Process the vector one cipher block at a time, in place. */
		for (k = 0; k < template[i].len;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, result, template[i].len)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}

/*
 * Run one skcipher test vector in one scatterlist/IV configuration (@cfg).
 * Sets the key, builds the src/dst scatterlists, performs the operation and
 * verifies the output (and, if applicable, the generated IV).
 * Returns 0 on success or a negative errno.
 */
static int test_skcipher_vec_cfg(const char *driver, int enc,
				 const struct cipher_testvec *vec,
				 unsigned int vec_num,
				 const struct testvec_config *cfg,
				 struct skcipher_request *req,
				 struct cipher_test_sglists *tsgls)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int alignmask = crypto_skcipher_alignmask(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const char *op = enc ? "encryption" : "decryption";
	DECLARE_CRYPTO_WAIT(wait);
	/* Oversized so the IV can be placed at a cfg-chosen misalignment. */
	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
		 cfg->iv_offset +
		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
	struct kvec input;
	int err;

	/* Set the key */
	if (vec->wk)
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	else
		crypto_skcipher_clear_flags(tfm,
					    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	err = crypto_skcipher_setkey(tfm, vec->key, vec->klen);
	if (err) {
		if (vec->fail) /* expectedly failed to set key?
 */
			return 0;
		pr_err("alg: skcipher: %s setkey failed with err %d on test vector %u; flags=%#x\n",
		       driver, err, vec_num, crypto_skcipher_get_flags(tfm));
		return err;
	}
	if (vec->fail) {
		pr_err("alg: skcipher: %s setkey unexpectedly succeeded on test vector %u\n",
		       driver, vec_num);
		return -EINVAL;
	}

	/* The IV must be copied to a buffer, as the algorithm may modify it */
	if (ivsize) {
		if (WARN_ON(ivsize > MAX_IVLEN))
			return -EINVAL;
		if (vec->iv && !(vec->generates_iv && enc))
			memcpy(iv, vec->iv, ivsize);
		else
			memset(iv, 0, ivsize);
	} else {
		if (vec->generates_iv) {
			pr_err("alg: skcipher: %s has ivsize=0 but test vector %u generates IV!\n",
			       driver, vec_num);
			return -EINVAL;
		}
		iv = NULL;
	}

	/* Build the src/dst scatterlists */
	input.iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
	input.iov_len = vec->len;
	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
					vec->len, vec->len, &input, 1);
	if (err) {
		pr_err("alg: skcipher: %s %s: error preparing scatterlists for test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return err;
	}

	/* Do the actual encryption or decryption */
	testmgr_poison(req->__ctx, crypto_skcipher_reqsize(tfm));
	skcipher_request_set_callback(req, req_flags, crypto_req_done, &wait);
	skcipher_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
				   vec->len, iv);
	err = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
			      crypto_skcipher_decrypt(req), &wait);
	if (err) {
		pr_err("alg: skcipher: %s %s failed with err %d on test vector %u, cfg=\"%s\"\n",
		       driver, op, err, vec_num, cfg->name);
		return err;
	}

	/* Check for the correct output (ciphertext or plaintext) */
	err = verify_correct_output(&tsgls->dst, enc ?
				    vec->ctext : vec->ptext,
				    vec->len, 0, true);
	if (err == -EOVERFLOW) {
		pr_err("alg: skcipher: %s %s overran dst buffer on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return err;
	}
	if (err) {
		pr_err("alg: skcipher: %s %s test failed (wrong result) on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		return err;
	}

	/* If applicable, check that the algorithm generated the correct IV */
	if (vec->generates_iv && enc && memcmp(iv, vec->iv, ivsize) != 0) {
		pr_err("alg: skcipher: %s %s test failed (wrong output IV) on test vector %u, cfg=\"%s\"\n",
		       driver, op, vec_num, cfg->name);
		hexdump(iv, ivsize);
		return -EINVAL;
	}

	return 0;
}

/*
 * Run one skcipher test vector through every default scatterlist
 * configuration, plus (when extra tests are enabled) a number of randomly
 * generated configurations.  Returns 0 on success or the first error.
 */
static int test_skcipher_vec(const char *driver, int enc,
			     const struct cipher_testvec *vec,
			     unsigned int vec_num,
			     struct skcipher_request *req,
			     struct cipher_test_sglists *tsgls)
{
	unsigned int i;
	int err;

	if (fips_enabled && vec->fips_skip)
		return 0;

	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
		err = test_skcipher_vec_cfg(driver, enc, vec, vec_num,
					    &default_cipher_testvec_configs[i],
					    req, tsgls);
		if (err)
			return err;
	}

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	/* Fuzz the scatterlist layout with random configurations. */
	if (!noextratests) {
		struct testvec_config cfg;
		char cfgname[TESTVEC_CONFIG_NAMELEN];

		for (i = 0; i < fuzz_iterations; i++) {
			generate_random_testvec_config(&cfg, cfgname,
						       sizeof(cfgname));
			err = test_skcipher_vec_cfg(driver, enc, vec, vec_num,
						    &cfg, req, tsgls);
			if (err)
				return err;
		}
	}
#endif
	return 0;
}

/* Run every vector in the suite in the given direction. */
static int test_skcipher(const char *driver, int enc,
			 const struct cipher_test_suite *suite,
			 struct skcipher_request *req,
			 struct cipher_test_sglists *tsgls)
{
	unsigned int i;
	int err;

	for (i
	     = 0; i < suite->count; i++) {
		err = test_skcipher_vec(driver, enc, &suite->vecs[i], i, req,
					tsgls);
		if (err)
			return err;
	}
	return 0;
}

/*
 * Entry point for skcipher testing: allocate the transform, a request and
 * the test scatterlists, then run the whole suite in both directions.
 */
static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	const struct cipher_test_suite *suite = &desc->suite.cipher;
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	struct cipher_test_sglists *tsgls = NULL;
	int err;

	if (suite->count <= 0) {
		pr_err("alg: skcipher: empty test suite for %s\n", driver);
		return -EINVAL;
	}

	tfm = crypto_alloc_skcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: skcipher: failed to allocate transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher: failed to allocate request for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	tsgls = alloc_cipher_test_sglists();
	if (!tsgls) {
		pr_err("alg: skcipher: failed to allocate test buffers for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	err = test_skcipher(driver, ENCRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_skcipher(driver, DECRYPT, suite, req, tsgls);
out:
	free_cipher_test_sglists(tsgls);
	skcipher_request_free(req);
	crypto_free_skcipher(tfm);
	return err;
}

/*
 * Test a synchronous (crypto_comp) compression transform:
 * - for each compression vector, compress then decompress and require an
 *   exact round trip back to the input;
 * - for each decompression vector, decompress and compare against the
 *   expected output.
 * Returns 0 on success or a negative errno on the first failure.
 */
static int test_comp(struct crypto_comp *tfm,
		     const struct comp_testvec *ctemplate,
		     const struct comp_testvec *dtemplate,
		     int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	char *output, *decomp_output;
	unsigned int i;
	int ret;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_output) {
		kfree(output);
		return -ENOMEM;
	}

	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(output, 0, COMP_BUF_SIZE);
		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		/* Round-trip: decompress what we just compressed. */
		ilen = dlen;
		dlen = COMP_BUF_SIZE;
		ret = crypto_comp_decompress(tfm, output,
					     ilen, decomp_output, &dlen);
		if (ret) {
			pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
			       i + 1, algo, -ret);
			goto out;
		}

		if (dlen != ctemplate[i].inlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, ctemplate[i].input,
			   ctemplate[i].inlen)) {
			pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
			       i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, decomp_output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output,
dtemplate[i].output, dlen)) { 2030 printk(KERN_ERR "alg: comp: Decompression test %d " 2031 "failed for %s\n", i + 1, algo); 2032 hexdump(decomp_output, dlen); 2033 ret = -EINVAL; 2034 goto out; 2035 } 2036 } 2037 2038 ret = 0; 2039 2040 out: 2041 kfree(decomp_output); 2042 kfree(output); 2043 return ret; 2044 } 2045 2046 static int test_acomp(struct crypto_acomp *tfm, 2047 const struct comp_testvec *ctemplate, 2048 const struct comp_testvec *dtemplate, 2049 int ctcount, int dtcount) 2050 { 2051 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm)); 2052 unsigned int i; 2053 char *output, *decomp_out; 2054 int ret; 2055 struct scatterlist src, dst; 2056 struct acomp_req *req; 2057 struct crypto_wait wait; 2058 2059 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 2060 if (!output) 2061 return -ENOMEM; 2062 2063 decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL); 2064 if (!decomp_out) { 2065 kfree(output); 2066 return -ENOMEM; 2067 } 2068 2069 for (i = 0; i < ctcount; i++) { 2070 unsigned int dlen = COMP_BUF_SIZE; 2071 int ilen = ctemplate[i].inlen; 2072 void *input_vec; 2073 2074 input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL); 2075 if (!input_vec) { 2076 ret = -ENOMEM; 2077 goto out; 2078 } 2079 2080 memset(output, 0, dlen); 2081 crypto_init_wait(&wait); 2082 sg_init_one(&src, input_vec, ilen); 2083 sg_init_one(&dst, output, dlen); 2084 2085 req = acomp_request_alloc(tfm); 2086 if (!req) { 2087 pr_err("alg: acomp: request alloc failed for %s\n", 2088 algo); 2089 kfree(input_vec); 2090 ret = -ENOMEM; 2091 goto out; 2092 } 2093 2094 acomp_request_set_params(req, &src, &dst, ilen, dlen); 2095 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2096 crypto_req_done, &wait); 2097 2098 ret = crypto_wait_req(crypto_acomp_compress(req), &wait); 2099 if (ret) { 2100 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n", 2101 i + 1, algo, -ret); 2102 kfree(input_vec); 2103 acomp_request_free(req); 2104 goto out; 2105 } 2106 2107 ilen 
= req->dlen; 2108 dlen = COMP_BUF_SIZE; 2109 sg_init_one(&src, output, ilen); 2110 sg_init_one(&dst, decomp_out, dlen); 2111 crypto_init_wait(&wait); 2112 acomp_request_set_params(req, &src, &dst, ilen, dlen); 2113 2114 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait); 2115 if (ret) { 2116 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n", 2117 i + 1, algo, -ret); 2118 kfree(input_vec); 2119 acomp_request_free(req); 2120 goto out; 2121 } 2122 2123 if (req->dlen != ctemplate[i].inlen) { 2124 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n", 2125 i + 1, algo, req->dlen); 2126 ret = -EINVAL; 2127 kfree(input_vec); 2128 acomp_request_free(req); 2129 goto out; 2130 } 2131 2132 if (memcmp(input_vec, decomp_out, req->dlen)) { 2133 pr_err("alg: acomp: Compression test %d failed for %s\n", 2134 i + 1, algo); 2135 hexdump(output, req->dlen); 2136 ret = -EINVAL; 2137 kfree(input_vec); 2138 acomp_request_free(req); 2139 goto out; 2140 } 2141 2142 kfree(input_vec); 2143 acomp_request_free(req); 2144 } 2145 2146 for (i = 0; i < dtcount; i++) { 2147 unsigned int dlen = COMP_BUF_SIZE; 2148 int ilen = dtemplate[i].inlen; 2149 void *input_vec; 2150 2151 input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL); 2152 if (!input_vec) { 2153 ret = -ENOMEM; 2154 goto out; 2155 } 2156 2157 memset(output, 0, dlen); 2158 crypto_init_wait(&wait); 2159 sg_init_one(&src, input_vec, ilen); 2160 sg_init_one(&dst, output, dlen); 2161 2162 req = acomp_request_alloc(tfm); 2163 if (!req) { 2164 pr_err("alg: acomp: request alloc failed for %s\n", 2165 algo); 2166 kfree(input_vec); 2167 ret = -ENOMEM; 2168 goto out; 2169 } 2170 2171 acomp_request_set_params(req, &src, &dst, ilen, dlen); 2172 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG, 2173 crypto_req_done, &wait); 2174 2175 ret = crypto_wait_req(crypto_acomp_decompress(req), &wait); 2176 if (ret) { 2177 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n", 2178 i + 
1, algo, -ret); 2179 kfree(input_vec); 2180 acomp_request_free(req); 2181 goto out; 2182 } 2183 2184 if (req->dlen != dtemplate[i].outlen) { 2185 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n", 2186 i + 1, algo, req->dlen); 2187 ret = -EINVAL; 2188 kfree(input_vec); 2189 acomp_request_free(req); 2190 goto out; 2191 } 2192 2193 if (memcmp(output, dtemplate[i].output, req->dlen)) { 2194 pr_err("alg: acomp: Decompression test %d failed for %s\n", 2195 i + 1, algo); 2196 hexdump(output, req->dlen); 2197 ret = -EINVAL; 2198 kfree(input_vec); 2199 acomp_request_free(req); 2200 goto out; 2201 } 2202 2203 kfree(input_vec); 2204 acomp_request_free(req); 2205 } 2206 2207 ret = 0; 2208 2209 out: 2210 kfree(decomp_out); 2211 kfree(output); 2212 return ret; 2213 } 2214 2215 static int test_cprng(struct crypto_rng *tfm, 2216 const struct cprng_testvec *template, 2217 unsigned int tcount) 2218 { 2219 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm)); 2220 int err = 0, i, j, seedsize; 2221 u8 *seed; 2222 char result[32]; 2223 2224 seedsize = crypto_rng_seedsize(tfm); 2225 2226 seed = kmalloc(seedsize, GFP_KERNEL); 2227 if (!seed) { 2228 printk(KERN_ERR "alg: cprng: Failed to allocate seed space " 2229 "for %s\n", algo); 2230 return -ENOMEM; 2231 } 2232 2233 for (i = 0; i < tcount; i++) { 2234 memset(result, 0, 32); 2235 2236 memcpy(seed, template[i].v, template[i].vlen); 2237 memcpy(seed + template[i].vlen, template[i].key, 2238 template[i].klen); 2239 memcpy(seed + template[i].vlen + template[i].klen, 2240 template[i].dt, template[i].dtlen); 2241 2242 err = crypto_rng_reset(tfm, seed, seedsize); 2243 if (err) { 2244 printk(KERN_ERR "alg: cprng: Failed to reset rng " 2245 "for %s\n", algo); 2246 goto out; 2247 } 2248 2249 for (j = 0; j < template[i].loops; j++) { 2250 err = crypto_rng_get_bytes(tfm, result, 2251 template[i].rlen); 2252 if (err < 0) { 2253 printk(KERN_ERR "alg: cprng: Failed to obtain " 2254 "the correct amount of 
random data for " 2255 "%s (requested %d)\n", algo, 2256 template[i].rlen); 2257 goto out; 2258 } 2259 } 2260 2261 err = memcmp(result, template[i].result, 2262 template[i].rlen); 2263 if (err) { 2264 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n", 2265 i, algo); 2266 hexdump(result, template[i].rlen); 2267 err = -EINVAL; 2268 goto out; 2269 } 2270 } 2271 2272 out: 2273 kfree(seed); 2274 return err; 2275 } 2276 2277 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver, 2278 u32 type, u32 mask) 2279 { 2280 const struct aead_test_suite *suite = &desc->suite.aead; 2281 struct crypto_aead *tfm; 2282 int err; 2283 2284 tfm = crypto_alloc_aead(driver, type, mask); 2285 if (IS_ERR(tfm)) { 2286 printk(KERN_ERR "alg: aead: Failed to load transform for %s: " 2287 "%ld\n", driver, PTR_ERR(tfm)); 2288 return PTR_ERR(tfm); 2289 } 2290 2291 err = test_aead(tfm, ENCRYPT, suite->vecs, suite->count); 2292 if (!err) 2293 err = test_aead(tfm, DECRYPT, suite->vecs, suite->count); 2294 2295 crypto_free_aead(tfm); 2296 return err; 2297 } 2298 2299 static int alg_test_cipher(const struct alg_test_desc *desc, 2300 const char *driver, u32 type, u32 mask) 2301 { 2302 const struct cipher_test_suite *suite = &desc->suite.cipher; 2303 struct crypto_cipher *tfm; 2304 int err; 2305 2306 tfm = crypto_alloc_cipher(driver, type, mask); 2307 if (IS_ERR(tfm)) { 2308 printk(KERN_ERR "alg: cipher: Failed to load transform for " 2309 "%s: %ld\n", driver, PTR_ERR(tfm)); 2310 return PTR_ERR(tfm); 2311 } 2312 2313 err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count); 2314 if (!err) 2315 err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count); 2316 2317 crypto_free_cipher(tfm); 2318 return err; 2319 } 2320 2321 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver, 2322 u32 type, u32 mask) 2323 { 2324 struct crypto_comp *comp; 2325 struct crypto_acomp *acomp; 2326 int err; 2327 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK; 2328 2329 
	if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
		/* Asynchronous compression interface. */
		acomp = crypto_alloc_acomp(driver, type, mask);
		if (IS_ERR(acomp)) {
			pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
			       driver, PTR_ERR(acomp));
			return PTR_ERR(acomp);
		}
		err = test_acomp(acomp, desc->suite.comp.comp.vecs,
				 desc->suite.comp.decomp.vecs,
				 desc->suite.comp.comp.count,
				 desc->suite.comp.decomp.count);
		crypto_free_acomp(acomp);
	} else {
		/* Legacy synchronous compression interface. */
		comp = crypto_alloc_comp(driver, type, mask);
		if (IS_ERR(comp)) {
			pr_err("alg: comp: Failed to load transform for %s: %ld\n",
			       driver, PTR_ERR(comp));
			return PTR_ERR(comp);
		}

		err = test_comp(comp, desc->suite.comp.comp.vecs,
				desc->suite.comp.decomp.vecs,
				desc->suite.comp.comp.count,
				desc->suite.comp.decomp.count);

		crypto_free_comp(comp);
	}
	return err;
}

/*
 * Allocate the ahash transform named by @driver and run @template
 * through all three hashing styles: one-shot digest, init/update/final,
 * and init/update/finup.
 */
static int __alg_test_hash(const struct hash_testvec *template,
			   unsigned int tcount, const char *driver,
			   u32 type, u32 mask)
{
	struct crypto_ahash *tfm;
	int err;

	tfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	err = test_hash(tfm, template, tcount, HASH_TEST_DIGEST);
	if (!err)
		err = test_hash(tfm, template, tcount, HASH_TEST_FINAL);
	if (!err)
		err = test_hash(tfm, template, tcount, HASH_TEST_FINUP);
	crypto_free_ahash(tfm);
	return err;
}

/*
 * Run the hash test suite, splitting the vectors into the unkeyed
 * prefix and the keyed remainder so each group runs on a freshly
 * allocated transform.
 */
static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	const struct hash_testvec *template = desc->suite.hash.vecs;
	unsigned int tcount = desc->suite.hash.count;
	unsigned int nr_unkeyed, nr_keyed;
	int err;

	/*
	 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
	 * first, before setting a
key on the tfm. To make this easier, we
	 * require that the unkeyed test vectors (if any) are listed first.
	 */

	/* Count the unkeyed prefix (vectors with ksize == 0). */
	for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
		if (template[nr_unkeyed].ksize)
			break;
	}
	/* Everything after the prefix must be keyed, or the split is wrong. */
	for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
		if (!template[nr_unkeyed + nr_keyed].ksize) {
			pr_err("alg: hash: test vectors for %s out of order, "
			       "unkeyed ones must come first\n", desc->alg);
			return -EINVAL;
		}
	}

	err = 0;
	if (nr_unkeyed) {
		err = __alg_test_hash(template, nr_unkeyed, driver, type, mask);
		template += nr_unkeyed;
	}

	if (!err && nr_keyed)
		err = __alg_test_hash(template, nr_keyed, driver, type, mask);

	return err;
}

/*
 * Test crc32c: run the regular hash vectors first, then additionally
 * verify via the shash interface that importing a partial state
 * (a raw u32 written into the descriptor context) produces the
 * expected bit-inverted final value.
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	__le32 val;
	int err;

	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		return err;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		if (PTR_ERR(tfm) == -ENOENT) {
			/*
			 * This crc32c implementation is only available through
			 * ahash API, not the shash API, so the remaining part
			 * of the test is not applicable to it.
2438 */ 2439 return 0; 2440 } 2441 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: " 2442 "%ld\n", driver, PTR_ERR(tfm)); 2443 return PTR_ERR(tfm); 2444 } 2445 2446 do { 2447 SHASH_DESC_ON_STACK(shash, tfm); 2448 u32 *ctx = (u32 *)shash_desc_ctx(shash); 2449 2450 shash->tfm = tfm; 2451 shash->flags = 0; 2452 2453 *ctx = 420553207; 2454 err = crypto_shash_final(shash, (u8 *)&val); 2455 if (err) { 2456 printk(KERN_ERR "alg: crc32c: Operation failed for " 2457 "%s: %d\n", driver, err); 2458 break; 2459 } 2460 2461 if (val != cpu_to_le32(~420553207)) { 2462 pr_err("alg: crc32c: Test failed for %s: %u\n", 2463 driver, le32_to_cpu(val)); 2464 err = -EINVAL; 2465 } 2466 } while (0); 2467 2468 crypto_free_shash(tfm); 2469 2470 return err; 2471 } 2472 2473 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver, 2474 u32 type, u32 mask) 2475 { 2476 struct crypto_rng *rng; 2477 int err; 2478 2479 rng = crypto_alloc_rng(driver, type, mask); 2480 if (IS_ERR(rng)) { 2481 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: " 2482 "%ld\n", driver, PTR_ERR(rng)); 2483 return PTR_ERR(rng); 2484 } 2485 2486 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count); 2487 2488 crypto_free_rng(rng); 2489 2490 return err; 2491 } 2492 2493 2494 static int drbg_cavs_test(const struct drbg_testvec *test, int pr, 2495 const char *driver, u32 type, u32 mask) 2496 { 2497 int ret = -EAGAIN; 2498 struct crypto_rng *drng; 2499 struct drbg_test_data test_data; 2500 struct drbg_string addtl, pers, testentropy; 2501 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL); 2502 2503 if (!buf) 2504 return -ENOMEM; 2505 2506 drng = crypto_alloc_rng(driver, type, mask); 2507 if (IS_ERR(drng)) { 2508 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for " 2509 "%s\n", driver); 2510 kzfree(buf); 2511 return -ENOMEM; 2512 } 2513 2514 test_data.testentropy = &testentropy; 2515 drbg_string_fill(&testentropy, test->entropy, 
			 test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	/* Instantiate the DRBG with the vector's entropy + pers string. */
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	/* First generate call; output is overwritten by the second below. */
	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/* Second generate call; this output is compared to the vector. */
	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	kzfree(buf);
	return ret;
}


/*
 * Run every DRBG vector in the suite. Drivers whose name starts with
 * "drbg_pr_" are tested with prediction resistance enabled.
 */
static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	int err = 0;
	int pr = 0;
	int i = 0;
	const struct drbg_testvec *template = desc->suite.drbg.vecs;
	unsigned int tcount = desc->suite.drbg.count;

	if (0 == memcmp(driver, "drbg_pr_", 8))
		pr = 1;

	for (i = 0; i < tcount; i++) {
		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
		if (err) {
			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
			       i, driver);
			err = -EINVAL;
			break;
		}
	}
	return err;

}

/*
 * Run one key-agreement vector. Party A's key pair is generated from
 * vec->secret, its public key is checked (or saved when vec->genkey),
 * and the shared secret computed against party B's public key is
 * verified — either against vec->expected_ss, or, for generated keys,
 * against the secret party B derives from A's public key.
 */
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
		       const char *alg)
{
	struct kpp_request *req;
	void *input_buf = NULL;
	void *output_buf = NULL;
	void *a_public = NULL;	/* party A's public key (genkey mode) */
	void *a_ss = NULL;	/* party A's shared secret (genkey mode) */
	void *shared_secret = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max;
	int err = -ENOMEM;
	struct scatterlist src, dst;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return err;

	crypto_init_wait(&wait);

	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
	if (err < 0)
		goto free_req;

	out_len_max = crypto_kpp_maxsize(tfm);
	output_buf = kzalloc(out_len_max, GFP_KERNEL);
	if (!output_buf) {
		err = -ENOMEM;
		goto free_req;
	}

	/* Use appropriate parameter as base */
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* Compute party A's public key */
	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
		       alg, err);
		goto free_output;
	}

	if (vec->genkey) {
		/* Save party A's public key */
		a_public = kmemdup(sg_virt(req->dst), out_len_max, GFP_KERNEL);
		if (!a_public) {
			err = -ENOMEM;
			goto free_output;
		}
	} else {
		/* Verify calculated public key */
		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
			   vec->expected_a_public_size)) {
			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
			       alg);
			err = -EINVAL;
			goto free_output;
		}
	}

	/* Calculate shared secret key by using counter part (b) public key. */
	input_buf = kmemdup(vec->b_public, vec->b_public_size, GFP_KERNEL);
	if (!input_buf) {
		err = -ENOMEM;
		goto free_output;
	}

	sg_init_one(&src, input_buf, vec->b_public_size);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_input(req, &src, vec->b_public_size);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
		       alg, err);
		goto free_all;
	}

	if (vec->genkey) {
		/* Save the shared secret obtained by party A */
		a_ss = kmemdup(sg_virt(req->dst), vec->expected_ss_size, GFP_KERNEL);
		if (!a_ss) {
			err = -ENOMEM;
			goto free_all;
		}

		/*
		 * Calculate party B's shared secret by using party A's
		 * public key.
		 */
		err = crypto_kpp_set_secret(tfm, vec->b_secret,
					    vec->b_secret_size);
		if (err < 0)
			goto free_all;

		sg_init_one(&src, a_public, vec->expected_a_public_size);
		sg_init_one(&dst, output_buf, out_len_max);
		kpp_request_set_input(req, &src, vec->expected_a_public_size);
		kpp_request_set_output(req, &dst, out_len_max);
		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					 crypto_req_done, &wait);
		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
				      &wait);
		if (err) {
			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
			       alg, err);
			goto free_all;
		}

		shared_secret = a_ss;
	} else {
		shared_secret = (void *)vec->expected_ss;
	}

	/*
	 * verify shared secret from which the user will derive
	 * secret key by executing whatever hash it has chosen
	 */
	if (memcmp(shared_secret, sg_virt(req->dst),
		   vec->expected_ss_size)) {
		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
		       alg);
		err = -EINVAL;
	}

free_all:
	kfree(a_ss);
	kfree(input_buf);
free_output:
	kfree(a_public);
	kfree(output_buf);
free_req:
	kpp_request_free(req);
	return err;
}

/*
 * Run all KPP vectors, stopping at the first failure.
 */
static int test_kpp(struct crypto_kpp *tfm, const char *alg,
		    const struct kpp_testvec *vecs, unsigned int tcount)
{
	int ret, i;

	for (i = 0; i < tcount; i++) {
		ret = do_test_kpp(tfm, vecs++, alg);
		if (ret) {
			pr_err("alg: %s: test failed on vector %d, err=%d\n",
			       alg, i + 1, ret);
			return ret;
		}
	}
	return 0;
}

/*
 * Allocate the KPP transform named by @driver and run the suite's
 * vectors (if any) against it.
 */
static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
			u32 type, u32 mask)
{
	struct crypto_kpp *tfm;
	int err = 0;

	tfm = crypto_alloc_kpp(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}
	if (desc->suite.kpp.vecs)
		err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
			       desc->suite.kpp.count);

	crypto_free_kpp(tfm);
	return err;
}

/*
 * Run one akcipher vector: encrypt (or verify) with the public-key
 * operation and check the output, then — unless the vector only
 * carries a public key — decrypt (or sign) and check the round trip.
 */
static int test_akcipher_one(struct crypto_akcipher *tfm,
			     const struct akcipher_testvec *vecs)
{
	char *xbuf[XBUFSIZE];
	struct akcipher_request *req;
	void *outbuf_enc = NULL;
	void *outbuf_dec = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max, out_len = 0;
	int err = -ENOMEM;
	struct scatterlist src, dst, src_tab[2];
	const
char *m, *c;
	unsigned int m_size, c_size;
	const char *op;

	if (testmgr_alloc_buf(xbuf))
		return err;

	req = akcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		goto free_xbuf;

	crypto_init_wait(&wait);

	if (vecs->public_key_vec)
		err = crypto_akcipher_set_pub_key(tfm, vecs->key,
						  vecs->key_len);
	else
		err = crypto_akcipher_set_priv_key(tfm, vecs->key,
						   vecs->key_len);
	if (err)
		goto free_req;

	err = -ENOMEM;
	out_len_max = crypto_akcipher_maxsize(tfm);

	/*
	 * First run test which do not require a private key, such as
	 * encrypt or verify.
	 */
	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_enc)
		goto free_req;

	if (!vecs->siggen_sigver_test) {
		m = vecs->m;
		m_size = vecs->m_size;
		c = vecs->c;
		c_size = vecs->c_size;
		op = "encrypt";
	} else {
		/* Swap args so we could keep plaintext (digest)
		 * in vecs->m, and cooked signature in vecs->c.
		 */
		m = vecs->c; /* signature */
		m_size = vecs->c_size;
		c = vecs->m; /* digest */
		c_size = vecs->m_size;
		op = "verify";
	}

	if (WARN_ON(m_size > PAGE_SIZE))
		goto free_all;
	memcpy(xbuf[0], m, m_size);

	/* Split the input across two sg entries to exercise sg handling. */
	sg_init_table(src_tab, 2);
	sg_set_buf(&src_tab[0], xbuf[0], 8);
	sg_set_buf(&src_tab[1], xbuf[0] + 8, m_size - 8);
	sg_init_one(&dst, outbuf_enc, out_len_max);
	akcipher_request_set_crypt(req, src_tab, &dst, m_size,
				   out_len_max);
	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature verification */
			      crypto_akcipher_verify(req) :
			      /* Run asymmetric encrypt */
			      crypto_akcipher_encrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
		goto free_all;
	}
	if (req->dst_len != c_size) {
		pr_err("alg: akcipher: %s test failed. Invalid output len\n",
		       op);
		err = -EINVAL;
		goto free_all;
	}
	/* verify that encrypted message is equal to expected */
	if (memcmp(c, outbuf_enc, c_size)) {
		pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
		hexdump(outbuf_enc, c_size);
		err = -EINVAL;
		goto free_all;
	}

	/*
	 * Don't invoke (decrypt or sign) test which require a private key
	 * for vectors with only a public key.
	 */
	if (vecs->public_key_vec) {
		err = 0;
		goto free_all;
	}
	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_dec) {
		err = -ENOMEM;
		goto free_all;
	}

	op = vecs->siggen_sigver_test ? "sign" : "decrypt";
	if (WARN_ON(c_size > PAGE_SIZE))
		goto free_all;
	memcpy(xbuf[0], c, c_size);

	sg_init_one(&src, xbuf[0], c_size);
	sg_init_one(&dst, outbuf_dec, out_len_max);
	crypto_init_wait(&wait);
	akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature generation */
			      crypto_akcipher_sign(req) :
			      /* Run asymmetric decrypt */
			      crypto_akcipher_decrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
		goto free_all;
	}
	out_len = req->dst_len;
	if (out_len < m_size) {
		pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
		       op, out_len);
		err = -EINVAL;
		goto free_all;
	}
	/* verify that decrypted message is equal to the original msg */
	/* The output may be zero-padded at the front; check padding + tail. */
	if (memchr_inv(outbuf_dec, 0, out_len - m_size) ||
	    memcmp(m, outbuf_dec + out_len - m_size, m_size)) {
		pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
		hexdump(outbuf_dec, out_len);
		err = -EINVAL;
	}
free_all:
	kfree(outbuf_dec);
	kfree(outbuf_enc);
free_req:
	akcipher_request_free(req);
free_xbuf:
	testmgr_free_buf(xbuf);
	return err;
}

/*
 * Run all akcipher vectors, stopping at the first failure.
 */
static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
			 const struct akcipher_testvec *vecs,
			 unsigned int tcount)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
	int ret, i;

	for (i = 0; i < tcount; i++) {
		ret = test_akcipher_one(tfm, vecs++);
		if (!ret)
			continue;

		pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
		       i + 1, algo, ret);
		return ret;
	}
	return 0;
}

/*
 * Allocate the akcipher transform named by @driver and run the suite's
 * vectors (if any) against it.
 */
static int alg_test_akcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	struct crypto_akcipher *tfm;
	int err = 0;

	tfm = crypto_alloc_akcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}
	if (desc->suite.akcipher.vecs)
		err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
				    desc->suite.akcipher.count);

	crypto_free_akcipher(tfm);
	return err;
}

/* Placeholder for algorithms that are deliberately not tested. */
static int alg_test_null(const struct alg_test_desc *desc,
			 const char *driver, u32 type, u32 mask)
{
	return 0;
}

/* Build a test suite entry from a test-vector array. */
#define __VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }

/* Please keep this list sorted by algorithm name.
*/ 2968 static const struct alg_test_desc alg_test_descs[] = { 2969 { 2970 .alg = "adiantum(xchacha12,aes)", 2971 .test = alg_test_skcipher, 2972 .suite = { 2973 .cipher = __VECS(adiantum_xchacha12_aes_tv_template) 2974 }, 2975 }, { 2976 .alg = "adiantum(xchacha20,aes)", 2977 .test = alg_test_skcipher, 2978 .suite = { 2979 .cipher = __VECS(adiantum_xchacha20_aes_tv_template) 2980 }, 2981 }, { 2982 .alg = "aegis128", 2983 .test = alg_test_aead, 2984 .suite = { 2985 .aead = __VECS(aegis128_tv_template) 2986 } 2987 }, { 2988 .alg = "aegis128l", 2989 .test = alg_test_aead, 2990 .suite = { 2991 .aead = __VECS(aegis128l_tv_template) 2992 } 2993 }, { 2994 .alg = "aegis256", 2995 .test = alg_test_aead, 2996 .suite = { 2997 .aead = __VECS(aegis256_tv_template) 2998 } 2999 }, { 3000 .alg = "ansi_cprng", 3001 .test = alg_test_cprng, 3002 .suite = { 3003 .cprng = __VECS(ansi_cprng_aes_tv_template) 3004 } 3005 }, { 3006 .alg = "authenc(hmac(md5),ecb(cipher_null))", 3007 .test = alg_test_aead, 3008 .suite = { 3009 .aead = __VECS(hmac_md5_ecb_cipher_null_tv_template) 3010 } 3011 }, { 3012 .alg = "authenc(hmac(sha1),cbc(aes))", 3013 .test = alg_test_aead, 3014 .fips_allowed = 1, 3015 .suite = { 3016 .aead = __VECS(hmac_sha1_aes_cbc_tv_temp) 3017 } 3018 }, { 3019 .alg = "authenc(hmac(sha1),cbc(des))", 3020 .test = alg_test_aead, 3021 .suite = { 3022 .aead = __VECS(hmac_sha1_des_cbc_tv_temp) 3023 } 3024 }, { 3025 .alg = "authenc(hmac(sha1),cbc(des3_ede))", 3026 .test = alg_test_aead, 3027 .fips_allowed = 1, 3028 .suite = { 3029 .aead = __VECS(hmac_sha1_des3_ede_cbc_tv_temp) 3030 } 3031 }, { 3032 .alg = "authenc(hmac(sha1),ctr(aes))", 3033 .test = alg_test_null, 3034 .fips_allowed = 1, 3035 }, { 3036 .alg = "authenc(hmac(sha1),ecb(cipher_null))", 3037 .test = alg_test_aead, 3038 .suite = { 3039 .aead = __VECS(hmac_sha1_ecb_cipher_null_tv_temp) 3040 } 3041 }, { 3042 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))", 3043 .test = alg_test_null, 3044 .fips_allowed = 1, 3045 }, { 3046 .alg 
= "authenc(hmac(sha224),cbc(des))", 3047 .test = alg_test_aead, 3048 .suite = { 3049 .aead = __VECS(hmac_sha224_des_cbc_tv_temp) 3050 } 3051 }, { 3052 .alg = "authenc(hmac(sha224),cbc(des3_ede))", 3053 .test = alg_test_aead, 3054 .fips_allowed = 1, 3055 .suite = { 3056 .aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp) 3057 } 3058 }, { 3059 .alg = "authenc(hmac(sha256),cbc(aes))", 3060 .test = alg_test_aead, 3061 .fips_allowed = 1, 3062 .suite = { 3063 .aead = __VECS(hmac_sha256_aes_cbc_tv_temp) 3064 } 3065 }, { 3066 .alg = "authenc(hmac(sha256),cbc(des))", 3067 .test = alg_test_aead, 3068 .suite = { 3069 .aead = __VECS(hmac_sha256_des_cbc_tv_temp) 3070 } 3071 }, { 3072 .alg = "authenc(hmac(sha256),cbc(des3_ede))", 3073 .test = alg_test_aead, 3074 .fips_allowed = 1, 3075 .suite = { 3076 .aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp) 3077 } 3078 }, { 3079 .alg = "authenc(hmac(sha256),ctr(aes))", 3080 .test = alg_test_null, 3081 .fips_allowed = 1, 3082 }, { 3083 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))", 3084 .test = alg_test_null, 3085 .fips_allowed = 1, 3086 }, { 3087 .alg = "authenc(hmac(sha384),cbc(des))", 3088 .test = alg_test_aead, 3089 .suite = { 3090 .aead = __VECS(hmac_sha384_des_cbc_tv_temp) 3091 } 3092 }, { 3093 .alg = "authenc(hmac(sha384),cbc(des3_ede))", 3094 .test = alg_test_aead, 3095 .fips_allowed = 1, 3096 .suite = { 3097 .aead = __VECS(hmac_sha384_des3_ede_cbc_tv_temp) 3098 } 3099 }, { 3100 .alg = "authenc(hmac(sha384),ctr(aes))", 3101 .test = alg_test_null, 3102 .fips_allowed = 1, 3103 }, { 3104 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))", 3105 .test = alg_test_null, 3106 .fips_allowed = 1, 3107 }, { 3108 .alg = "authenc(hmac(sha512),cbc(aes))", 3109 .fips_allowed = 1, 3110 .test = alg_test_aead, 3111 .suite = { 3112 .aead = __VECS(hmac_sha512_aes_cbc_tv_temp) 3113 } 3114 }, { 3115 .alg = "authenc(hmac(sha512),cbc(des))", 3116 .test = alg_test_aead, 3117 .suite = { 3118 .aead = __VECS(hmac_sha512_des_cbc_tv_temp) 3119 } 3120 }, { 3121 
.alg = "authenc(hmac(sha512),cbc(des3_ede))", 3122 .test = alg_test_aead, 3123 .fips_allowed = 1, 3124 .suite = { 3125 .aead = __VECS(hmac_sha512_des3_ede_cbc_tv_temp) 3126 } 3127 }, { 3128 .alg = "authenc(hmac(sha512),ctr(aes))", 3129 .test = alg_test_null, 3130 .fips_allowed = 1, 3131 }, { 3132 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))", 3133 .test = alg_test_null, 3134 .fips_allowed = 1, 3135 }, { 3136 .alg = "cbc(aes)", 3137 .test = alg_test_skcipher, 3138 .fips_allowed = 1, 3139 .suite = { 3140 .cipher = __VECS(aes_cbc_tv_template) 3141 }, 3142 }, { 3143 .alg = "cbc(anubis)", 3144 .test = alg_test_skcipher, 3145 .suite = { 3146 .cipher = __VECS(anubis_cbc_tv_template) 3147 }, 3148 }, { 3149 .alg = "cbc(blowfish)", 3150 .test = alg_test_skcipher, 3151 .suite = { 3152 .cipher = __VECS(bf_cbc_tv_template) 3153 }, 3154 }, { 3155 .alg = "cbc(camellia)", 3156 .test = alg_test_skcipher, 3157 .suite = { 3158 .cipher = __VECS(camellia_cbc_tv_template) 3159 }, 3160 }, { 3161 .alg = "cbc(cast5)", 3162 .test = alg_test_skcipher, 3163 .suite = { 3164 .cipher = __VECS(cast5_cbc_tv_template) 3165 }, 3166 }, { 3167 .alg = "cbc(cast6)", 3168 .test = alg_test_skcipher, 3169 .suite = { 3170 .cipher = __VECS(cast6_cbc_tv_template) 3171 }, 3172 }, { 3173 .alg = "cbc(des)", 3174 .test = alg_test_skcipher, 3175 .suite = { 3176 .cipher = __VECS(des_cbc_tv_template) 3177 }, 3178 }, { 3179 .alg = "cbc(des3_ede)", 3180 .test = alg_test_skcipher, 3181 .fips_allowed = 1, 3182 .suite = { 3183 .cipher = __VECS(des3_ede_cbc_tv_template) 3184 }, 3185 }, { 3186 /* Same as cbc(aes) except the key is stored in 3187 * hardware secure memory which we reference by index 3188 */ 3189 .alg = "cbc(paes)", 3190 .test = alg_test_null, 3191 .fips_allowed = 1, 3192 }, { 3193 .alg = "cbc(serpent)", 3194 .test = alg_test_skcipher, 3195 .suite = { 3196 .cipher = __VECS(serpent_cbc_tv_template) 3197 }, 3198 }, { 3199 .alg = "cbc(sm4)", 3200 .test = alg_test_skcipher, 3201 .suite = { 3202 .cipher = 
__VECS(sm4_cbc_tv_template) 3203 } 3204 }, { 3205 .alg = "cbc(twofish)", 3206 .test = alg_test_skcipher, 3207 .suite = { 3208 .cipher = __VECS(tf_cbc_tv_template) 3209 }, 3210 }, { 3211 .alg = "cbcmac(aes)", 3212 .fips_allowed = 1, 3213 .test = alg_test_hash, 3214 .suite = { 3215 .hash = __VECS(aes_cbcmac_tv_template) 3216 } 3217 }, { 3218 .alg = "ccm(aes)", 3219 .test = alg_test_aead, 3220 .fips_allowed = 1, 3221 .suite = { 3222 .aead = __VECS(aes_ccm_tv_template) 3223 } 3224 }, { 3225 .alg = "cfb(aes)", 3226 .test = alg_test_skcipher, 3227 .fips_allowed = 1, 3228 .suite = { 3229 .cipher = __VECS(aes_cfb_tv_template) 3230 }, 3231 }, { 3232 .alg = "chacha20", 3233 .test = alg_test_skcipher, 3234 .suite = { 3235 .cipher = __VECS(chacha20_tv_template) 3236 }, 3237 }, { 3238 .alg = "cmac(aes)", 3239 .fips_allowed = 1, 3240 .test = alg_test_hash, 3241 .suite = { 3242 .hash = __VECS(aes_cmac128_tv_template) 3243 } 3244 }, { 3245 .alg = "cmac(des3_ede)", 3246 .fips_allowed = 1, 3247 .test = alg_test_hash, 3248 .suite = { 3249 .hash = __VECS(des3_ede_cmac64_tv_template) 3250 } 3251 }, { 3252 .alg = "compress_null", 3253 .test = alg_test_null, 3254 }, { 3255 .alg = "crc32", 3256 .test = alg_test_hash, 3257 .fips_allowed = 1, 3258 .suite = { 3259 .hash = __VECS(crc32_tv_template) 3260 } 3261 }, { 3262 .alg = "crc32c", 3263 .test = alg_test_crc32c, 3264 .fips_allowed = 1, 3265 .suite = { 3266 .hash = __VECS(crc32c_tv_template) 3267 } 3268 }, { 3269 .alg = "crct10dif", 3270 .test = alg_test_hash, 3271 .fips_allowed = 1, 3272 .suite = { 3273 .hash = __VECS(crct10dif_tv_template) 3274 } 3275 }, { 3276 .alg = "ctr(aes)", 3277 .test = alg_test_skcipher, 3278 .fips_allowed = 1, 3279 .suite = { 3280 .cipher = __VECS(aes_ctr_tv_template) 3281 } 3282 }, { 3283 .alg = "ctr(blowfish)", 3284 .test = alg_test_skcipher, 3285 .suite = { 3286 .cipher = __VECS(bf_ctr_tv_template) 3287 } 3288 }, { 3289 .alg = "ctr(camellia)", 3290 .test = alg_test_skcipher, 3291 .suite = { 3292 .cipher = 
__VECS(camellia_ctr_tv_template) 3293 } 3294 }, { 3295 .alg = "ctr(cast5)", 3296 .test = alg_test_skcipher, 3297 .suite = { 3298 .cipher = __VECS(cast5_ctr_tv_template) 3299 } 3300 }, { 3301 .alg = "ctr(cast6)", 3302 .test = alg_test_skcipher, 3303 .suite = { 3304 .cipher = __VECS(cast6_ctr_tv_template) 3305 } 3306 }, { 3307 .alg = "ctr(des)", 3308 .test = alg_test_skcipher, 3309 .suite = { 3310 .cipher = __VECS(des_ctr_tv_template) 3311 } 3312 }, { 3313 .alg = "ctr(des3_ede)", 3314 .test = alg_test_skcipher, 3315 .fips_allowed = 1, 3316 .suite = { 3317 .cipher = __VECS(des3_ede_ctr_tv_template) 3318 } 3319 }, { 3320 /* Same as ctr(aes) except the key is stored in 3321 * hardware secure memory which we reference by index 3322 */ 3323 .alg = "ctr(paes)", 3324 .test = alg_test_null, 3325 .fips_allowed = 1, 3326 }, { 3327 .alg = "ctr(serpent)", 3328 .test = alg_test_skcipher, 3329 .suite = { 3330 .cipher = __VECS(serpent_ctr_tv_template) 3331 } 3332 }, { 3333 .alg = "ctr(sm4)", 3334 .test = alg_test_skcipher, 3335 .suite = { 3336 .cipher = __VECS(sm4_ctr_tv_template) 3337 } 3338 }, { 3339 .alg = "ctr(twofish)", 3340 .test = alg_test_skcipher, 3341 .suite = { 3342 .cipher = __VECS(tf_ctr_tv_template) 3343 } 3344 }, { 3345 .alg = "cts(cbc(aes))", 3346 .test = alg_test_skcipher, 3347 .fips_allowed = 1, 3348 .suite = { 3349 .cipher = __VECS(cts_mode_tv_template) 3350 } 3351 }, { 3352 .alg = "deflate", 3353 .test = alg_test_comp, 3354 .fips_allowed = 1, 3355 .suite = { 3356 .comp = { 3357 .comp = __VECS(deflate_comp_tv_template), 3358 .decomp = __VECS(deflate_decomp_tv_template) 3359 } 3360 } 3361 }, { 3362 .alg = "dh", 3363 .test = alg_test_kpp, 3364 .fips_allowed = 1, 3365 .suite = { 3366 .kpp = __VECS(dh_tv_template) 3367 } 3368 }, { 3369 .alg = "digest_null", 3370 .test = alg_test_null, 3371 }, { 3372 .alg = "drbg_nopr_ctr_aes128", 3373 .test = alg_test_drbg, 3374 .fips_allowed = 1, 3375 .suite = { 3376 .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template) 3377 } 3378 }, { 
3379 .alg = "drbg_nopr_ctr_aes192", 3380 .test = alg_test_drbg, 3381 .fips_allowed = 1, 3382 .suite = { 3383 .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template) 3384 } 3385 }, { 3386 .alg = "drbg_nopr_ctr_aes256", 3387 .test = alg_test_drbg, 3388 .fips_allowed = 1, 3389 .suite = { 3390 .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template) 3391 } 3392 }, { 3393 /* 3394 * There is no need to specifically test the DRBG with every 3395 * backend cipher -- covered by drbg_nopr_hmac_sha256 test 3396 */ 3397 .alg = "drbg_nopr_hmac_sha1", 3398 .fips_allowed = 1, 3399 .test = alg_test_null, 3400 }, { 3401 .alg = "drbg_nopr_hmac_sha256", 3402 .test = alg_test_drbg, 3403 .fips_allowed = 1, 3404 .suite = { 3405 .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template) 3406 } 3407 }, { 3408 /* covered by drbg_nopr_hmac_sha256 test */ 3409 .alg = "drbg_nopr_hmac_sha384", 3410 .fips_allowed = 1, 3411 .test = alg_test_null, 3412 }, { 3413 .alg = "drbg_nopr_hmac_sha512", 3414 .test = alg_test_null, 3415 .fips_allowed = 1, 3416 }, { 3417 .alg = "drbg_nopr_sha1", 3418 .fips_allowed = 1, 3419 .test = alg_test_null, 3420 }, { 3421 .alg = "drbg_nopr_sha256", 3422 .test = alg_test_drbg, 3423 .fips_allowed = 1, 3424 .suite = { 3425 .drbg = __VECS(drbg_nopr_sha256_tv_template) 3426 } 3427 }, { 3428 /* covered by drbg_nopr_sha256 test */ 3429 .alg = "drbg_nopr_sha384", 3430 .fips_allowed = 1, 3431 .test = alg_test_null, 3432 }, { 3433 .alg = "drbg_nopr_sha512", 3434 .fips_allowed = 1, 3435 .test = alg_test_null, 3436 }, { 3437 .alg = "drbg_pr_ctr_aes128", 3438 .test = alg_test_drbg, 3439 .fips_allowed = 1, 3440 .suite = { 3441 .drbg = __VECS(drbg_pr_ctr_aes128_tv_template) 3442 } 3443 }, { 3444 /* covered by drbg_pr_ctr_aes128 test */ 3445 .alg = "drbg_pr_ctr_aes192", 3446 .fips_allowed = 1, 3447 .test = alg_test_null, 3448 }, { 3449 .alg = "drbg_pr_ctr_aes256", 3450 .fips_allowed = 1, 3451 .test = alg_test_null, 3452 }, { 3453 .alg = "drbg_pr_hmac_sha1", 3454 .fips_allowed = 1, 3455 .test = alg_test_null, 
3456 }, { 3457 .alg = "drbg_pr_hmac_sha256", 3458 .test = alg_test_drbg, 3459 .fips_allowed = 1, 3460 .suite = { 3461 .drbg = __VECS(drbg_pr_hmac_sha256_tv_template) 3462 } 3463 }, { 3464 /* covered by drbg_pr_hmac_sha256 test */ 3465 .alg = "drbg_pr_hmac_sha384", 3466 .fips_allowed = 1, 3467 .test = alg_test_null, 3468 }, { 3469 .alg = "drbg_pr_hmac_sha512", 3470 .test = alg_test_null, 3471 .fips_allowed = 1, 3472 }, { 3473 .alg = "drbg_pr_sha1", 3474 .fips_allowed = 1, 3475 .test = alg_test_null, 3476 }, { 3477 .alg = "drbg_pr_sha256", 3478 .test = alg_test_drbg, 3479 .fips_allowed = 1, 3480 .suite = { 3481 .drbg = __VECS(drbg_pr_sha256_tv_template) 3482 } 3483 }, { 3484 /* covered by drbg_pr_sha256 test */ 3485 .alg = "drbg_pr_sha384", 3486 .fips_allowed = 1, 3487 .test = alg_test_null, 3488 }, { 3489 .alg = "drbg_pr_sha512", 3490 .fips_allowed = 1, 3491 .test = alg_test_null, 3492 }, { 3493 .alg = "ecb(aes)", 3494 .test = alg_test_skcipher, 3495 .fips_allowed = 1, 3496 .suite = { 3497 .cipher = __VECS(aes_tv_template) 3498 } 3499 }, { 3500 .alg = "ecb(anubis)", 3501 .test = alg_test_skcipher, 3502 .suite = { 3503 .cipher = __VECS(anubis_tv_template) 3504 } 3505 }, { 3506 .alg = "ecb(arc4)", 3507 .test = alg_test_skcipher, 3508 .suite = { 3509 .cipher = __VECS(arc4_tv_template) 3510 } 3511 }, { 3512 .alg = "ecb(blowfish)", 3513 .test = alg_test_skcipher, 3514 .suite = { 3515 .cipher = __VECS(bf_tv_template) 3516 } 3517 }, { 3518 .alg = "ecb(camellia)", 3519 .test = alg_test_skcipher, 3520 .suite = { 3521 .cipher = __VECS(camellia_tv_template) 3522 } 3523 }, { 3524 .alg = "ecb(cast5)", 3525 .test = alg_test_skcipher, 3526 .suite = { 3527 .cipher = __VECS(cast5_tv_template) 3528 } 3529 }, { 3530 .alg = "ecb(cast6)", 3531 .test = alg_test_skcipher, 3532 .suite = { 3533 .cipher = __VECS(cast6_tv_template) 3534 } 3535 }, { 3536 .alg = "ecb(cipher_null)", 3537 .test = alg_test_null, 3538 .fips_allowed = 1, 3539 }, { 3540 .alg = "ecb(des)", 3541 .test = 
alg_test_skcipher, 3542 .suite = { 3543 .cipher = __VECS(des_tv_template) 3544 } 3545 }, { 3546 .alg = "ecb(des3_ede)", 3547 .test = alg_test_skcipher, 3548 .fips_allowed = 1, 3549 .suite = { 3550 .cipher = __VECS(des3_ede_tv_template) 3551 } 3552 }, { 3553 .alg = "ecb(fcrypt)", 3554 .test = alg_test_skcipher, 3555 .suite = { 3556 .cipher = { 3557 .vecs = fcrypt_pcbc_tv_template, 3558 .count = 1 3559 } 3560 } 3561 }, { 3562 .alg = "ecb(khazad)", 3563 .test = alg_test_skcipher, 3564 .suite = { 3565 .cipher = __VECS(khazad_tv_template) 3566 } 3567 }, { 3568 /* Same as ecb(aes) except the key is stored in 3569 * hardware secure memory which we reference by index 3570 */ 3571 .alg = "ecb(paes)", 3572 .test = alg_test_null, 3573 .fips_allowed = 1, 3574 }, { 3575 .alg = "ecb(seed)", 3576 .test = alg_test_skcipher, 3577 .suite = { 3578 .cipher = __VECS(seed_tv_template) 3579 } 3580 }, { 3581 .alg = "ecb(serpent)", 3582 .test = alg_test_skcipher, 3583 .suite = { 3584 .cipher = __VECS(serpent_tv_template) 3585 } 3586 }, { 3587 .alg = "ecb(sm4)", 3588 .test = alg_test_skcipher, 3589 .suite = { 3590 .cipher = __VECS(sm4_tv_template) 3591 } 3592 }, { 3593 .alg = "ecb(tea)", 3594 .test = alg_test_skcipher, 3595 .suite = { 3596 .cipher = __VECS(tea_tv_template) 3597 } 3598 }, { 3599 .alg = "ecb(tnepres)", 3600 .test = alg_test_skcipher, 3601 .suite = { 3602 .cipher = __VECS(tnepres_tv_template) 3603 } 3604 }, { 3605 .alg = "ecb(twofish)", 3606 .test = alg_test_skcipher, 3607 .suite = { 3608 .cipher = __VECS(tf_tv_template) 3609 } 3610 }, { 3611 .alg = "ecb(xeta)", 3612 .test = alg_test_skcipher, 3613 .suite = { 3614 .cipher = __VECS(xeta_tv_template) 3615 } 3616 }, { 3617 .alg = "ecb(xtea)", 3618 .test = alg_test_skcipher, 3619 .suite = { 3620 .cipher = __VECS(xtea_tv_template) 3621 } 3622 }, { 3623 .alg = "ecdh", 3624 .test = alg_test_kpp, 3625 .fips_allowed = 1, 3626 .suite = { 3627 .kpp = __VECS(ecdh_tv_template) 3628 } 3629 }, { 3630 .alg = "gcm(aes)", 3631 .test = 
alg_test_aead, 3632 .fips_allowed = 1, 3633 .suite = { 3634 .aead = __VECS(aes_gcm_tv_template) 3635 } 3636 }, { 3637 .alg = "ghash", 3638 .test = alg_test_hash, 3639 .fips_allowed = 1, 3640 .suite = { 3641 .hash = __VECS(ghash_tv_template) 3642 } 3643 }, { 3644 .alg = "hmac(md5)", 3645 .test = alg_test_hash, 3646 .suite = { 3647 .hash = __VECS(hmac_md5_tv_template) 3648 } 3649 }, { 3650 .alg = "hmac(rmd128)", 3651 .test = alg_test_hash, 3652 .suite = { 3653 .hash = __VECS(hmac_rmd128_tv_template) 3654 } 3655 }, { 3656 .alg = "hmac(rmd160)", 3657 .test = alg_test_hash, 3658 .suite = { 3659 .hash = __VECS(hmac_rmd160_tv_template) 3660 } 3661 }, { 3662 .alg = "hmac(sha1)", 3663 .test = alg_test_hash, 3664 .fips_allowed = 1, 3665 .suite = { 3666 .hash = __VECS(hmac_sha1_tv_template) 3667 } 3668 }, { 3669 .alg = "hmac(sha224)", 3670 .test = alg_test_hash, 3671 .fips_allowed = 1, 3672 .suite = { 3673 .hash = __VECS(hmac_sha224_tv_template) 3674 } 3675 }, { 3676 .alg = "hmac(sha256)", 3677 .test = alg_test_hash, 3678 .fips_allowed = 1, 3679 .suite = { 3680 .hash = __VECS(hmac_sha256_tv_template) 3681 } 3682 }, { 3683 .alg = "hmac(sha3-224)", 3684 .test = alg_test_hash, 3685 .fips_allowed = 1, 3686 .suite = { 3687 .hash = __VECS(hmac_sha3_224_tv_template) 3688 } 3689 }, { 3690 .alg = "hmac(sha3-256)", 3691 .test = alg_test_hash, 3692 .fips_allowed = 1, 3693 .suite = { 3694 .hash = __VECS(hmac_sha3_256_tv_template) 3695 } 3696 }, { 3697 .alg = "hmac(sha3-384)", 3698 .test = alg_test_hash, 3699 .fips_allowed = 1, 3700 .suite = { 3701 .hash = __VECS(hmac_sha3_384_tv_template) 3702 } 3703 }, { 3704 .alg = "hmac(sha3-512)", 3705 .test = alg_test_hash, 3706 .fips_allowed = 1, 3707 .suite = { 3708 .hash = __VECS(hmac_sha3_512_tv_template) 3709 } 3710 }, { 3711 .alg = "hmac(sha384)", 3712 .test = alg_test_hash, 3713 .fips_allowed = 1, 3714 .suite = { 3715 .hash = __VECS(hmac_sha384_tv_template) 3716 } 3717 }, { 3718 .alg = "hmac(sha512)", 3719 .test = alg_test_hash, 3720 
.fips_allowed = 1, 3721 .suite = { 3722 .hash = __VECS(hmac_sha512_tv_template) 3723 } 3724 }, { 3725 .alg = "hmac(streebog256)", 3726 .test = alg_test_hash, 3727 .suite = { 3728 .hash = __VECS(hmac_streebog256_tv_template) 3729 } 3730 }, { 3731 .alg = "hmac(streebog512)", 3732 .test = alg_test_hash, 3733 .suite = { 3734 .hash = __VECS(hmac_streebog512_tv_template) 3735 } 3736 }, { 3737 .alg = "jitterentropy_rng", 3738 .fips_allowed = 1, 3739 .test = alg_test_null, 3740 }, { 3741 .alg = "kw(aes)", 3742 .test = alg_test_skcipher, 3743 .fips_allowed = 1, 3744 .suite = { 3745 .cipher = __VECS(aes_kw_tv_template) 3746 } 3747 }, { 3748 .alg = "lrw(aes)", 3749 .test = alg_test_skcipher, 3750 .suite = { 3751 .cipher = __VECS(aes_lrw_tv_template) 3752 } 3753 }, { 3754 .alg = "lrw(camellia)", 3755 .test = alg_test_skcipher, 3756 .suite = { 3757 .cipher = __VECS(camellia_lrw_tv_template) 3758 } 3759 }, { 3760 .alg = "lrw(cast6)", 3761 .test = alg_test_skcipher, 3762 .suite = { 3763 .cipher = __VECS(cast6_lrw_tv_template) 3764 } 3765 }, { 3766 .alg = "lrw(serpent)", 3767 .test = alg_test_skcipher, 3768 .suite = { 3769 .cipher = __VECS(serpent_lrw_tv_template) 3770 } 3771 }, { 3772 .alg = "lrw(twofish)", 3773 .test = alg_test_skcipher, 3774 .suite = { 3775 .cipher = __VECS(tf_lrw_tv_template) 3776 } 3777 }, { 3778 .alg = "lz4", 3779 .test = alg_test_comp, 3780 .fips_allowed = 1, 3781 .suite = { 3782 .comp = { 3783 .comp = __VECS(lz4_comp_tv_template), 3784 .decomp = __VECS(lz4_decomp_tv_template) 3785 } 3786 } 3787 }, { 3788 .alg = "lz4hc", 3789 .test = alg_test_comp, 3790 .fips_allowed = 1, 3791 .suite = { 3792 .comp = { 3793 .comp = __VECS(lz4hc_comp_tv_template), 3794 .decomp = __VECS(lz4hc_decomp_tv_template) 3795 } 3796 } 3797 }, { 3798 .alg = "lzo", 3799 .test = alg_test_comp, 3800 .fips_allowed = 1, 3801 .suite = { 3802 .comp = { 3803 .comp = __VECS(lzo_comp_tv_template), 3804 .decomp = __VECS(lzo_decomp_tv_template) 3805 } 3806 } 3807 }, { 3808 .alg = "md4", 3809 .test 
= alg_test_hash, 3810 .suite = { 3811 .hash = __VECS(md4_tv_template) 3812 } 3813 }, { 3814 .alg = "md5", 3815 .test = alg_test_hash, 3816 .suite = { 3817 .hash = __VECS(md5_tv_template) 3818 } 3819 }, { 3820 .alg = "michael_mic", 3821 .test = alg_test_hash, 3822 .suite = { 3823 .hash = __VECS(michael_mic_tv_template) 3824 } 3825 }, { 3826 .alg = "morus1280", 3827 .test = alg_test_aead, 3828 .suite = { 3829 .aead = __VECS(morus1280_tv_template) 3830 } 3831 }, { 3832 .alg = "morus640", 3833 .test = alg_test_aead, 3834 .suite = { 3835 .aead = __VECS(morus640_tv_template) 3836 } 3837 }, { 3838 .alg = "nhpoly1305", 3839 .test = alg_test_hash, 3840 .suite = { 3841 .hash = __VECS(nhpoly1305_tv_template) 3842 } 3843 }, { 3844 .alg = "ofb(aes)", 3845 .test = alg_test_skcipher, 3846 .fips_allowed = 1, 3847 .suite = { 3848 .cipher = __VECS(aes_ofb_tv_template) 3849 } 3850 }, { 3851 /* Same as ofb(aes) except the key is stored in 3852 * hardware secure memory which we reference by index 3853 */ 3854 .alg = "ofb(paes)", 3855 .test = alg_test_null, 3856 .fips_allowed = 1, 3857 }, { 3858 .alg = "pcbc(fcrypt)", 3859 .test = alg_test_skcipher, 3860 .suite = { 3861 .cipher = __VECS(fcrypt_pcbc_tv_template) 3862 } 3863 }, { 3864 .alg = "pkcs1pad(rsa,sha224)", 3865 .test = alg_test_null, 3866 .fips_allowed = 1, 3867 }, { 3868 .alg = "pkcs1pad(rsa,sha256)", 3869 .test = alg_test_akcipher, 3870 .fips_allowed = 1, 3871 .suite = { 3872 .akcipher = __VECS(pkcs1pad_rsa_tv_template) 3873 } 3874 }, { 3875 .alg = "pkcs1pad(rsa,sha384)", 3876 .test = alg_test_null, 3877 .fips_allowed = 1, 3878 }, { 3879 .alg = "pkcs1pad(rsa,sha512)", 3880 .test = alg_test_null, 3881 .fips_allowed = 1, 3882 }, { 3883 .alg = "poly1305", 3884 .test = alg_test_hash, 3885 .suite = { 3886 .hash = __VECS(poly1305_tv_template) 3887 } 3888 }, { 3889 .alg = "rfc3686(ctr(aes))", 3890 .test = alg_test_skcipher, 3891 .fips_allowed = 1, 3892 .suite = { 3893 .cipher = __VECS(aes_ctr_rfc3686_tv_template) 3894 } 3895 }, { 3896 
.alg = "rfc4106(gcm(aes))", 3897 .test = alg_test_aead, 3898 .fips_allowed = 1, 3899 .suite = { 3900 .aead = __VECS(aes_gcm_rfc4106_tv_template) 3901 } 3902 }, { 3903 .alg = "rfc4309(ccm(aes))", 3904 .test = alg_test_aead, 3905 .fips_allowed = 1, 3906 .suite = { 3907 .aead = __VECS(aes_ccm_rfc4309_tv_template) 3908 } 3909 }, { 3910 .alg = "rfc4543(gcm(aes))", 3911 .test = alg_test_aead, 3912 .suite = { 3913 .aead = __VECS(aes_gcm_rfc4543_tv_template) 3914 } 3915 }, { 3916 .alg = "rfc7539(chacha20,poly1305)", 3917 .test = alg_test_aead, 3918 .suite = { 3919 .aead = __VECS(rfc7539_tv_template) 3920 } 3921 }, { 3922 .alg = "rfc7539esp(chacha20,poly1305)", 3923 .test = alg_test_aead, 3924 .suite = { 3925 .aead = __VECS(rfc7539esp_tv_template) 3926 } 3927 }, { 3928 .alg = "rmd128", 3929 .test = alg_test_hash, 3930 .suite = { 3931 .hash = __VECS(rmd128_tv_template) 3932 } 3933 }, { 3934 .alg = "rmd160", 3935 .test = alg_test_hash, 3936 .suite = { 3937 .hash = __VECS(rmd160_tv_template) 3938 } 3939 }, { 3940 .alg = "rmd256", 3941 .test = alg_test_hash, 3942 .suite = { 3943 .hash = __VECS(rmd256_tv_template) 3944 } 3945 }, { 3946 .alg = "rmd320", 3947 .test = alg_test_hash, 3948 .suite = { 3949 .hash = __VECS(rmd320_tv_template) 3950 } 3951 }, { 3952 .alg = "rsa", 3953 .test = alg_test_akcipher, 3954 .fips_allowed = 1, 3955 .suite = { 3956 .akcipher = __VECS(rsa_tv_template) 3957 } 3958 }, { 3959 .alg = "salsa20", 3960 .test = alg_test_skcipher, 3961 .suite = { 3962 .cipher = __VECS(salsa20_stream_tv_template) 3963 } 3964 }, { 3965 .alg = "sha1", 3966 .test = alg_test_hash, 3967 .fips_allowed = 1, 3968 .suite = { 3969 .hash = __VECS(sha1_tv_template) 3970 } 3971 }, { 3972 .alg = "sha224", 3973 .test = alg_test_hash, 3974 .fips_allowed = 1, 3975 .suite = { 3976 .hash = __VECS(sha224_tv_template) 3977 } 3978 }, { 3979 .alg = "sha256", 3980 .test = alg_test_hash, 3981 .fips_allowed = 1, 3982 .suite = { 3983 .hash = __VECS(sha256_tv_template) 3984 } 3985 }, { 3986 .alg = 
"sha3-224", 3987 .test = alg_test_hash, 3988 .fips_allowed = 1, 3989 .suite = { 3990 .hash = __VECS(sha3_224_tv_template) 3991 } 3992 }, { 3993 .alg = "sha3-256", 3994 .test = alg_test_hash, 3995 .fips_allowed = 1, 3996 .suite = { 3997 .hash = __VECS(sha3_256_tv_template) 3998 } 3999 }, { 4000 .alg = "sha3-384", 4001 .test = alg_test_hash, 4002 .fips_allowed = 1, 4003 .suite = { 4004 .hash = __VECS(sha3_384_tv_template) 4005 } 4006 }, { 4007 .alg = "sha3-512", 4008 .test = alg_test_hash, 4009 .fips_allowed = 1, 4010 .suite = { 4011 .hash = __VECS(sha3_512_tv_template) 4012 } 4013 }, { 4014 .alg = "sha384", 4015 .test = alg_test_hash, 4016 .fips_allowed = 1, 4017 .suite = { 4018 .hash = __VECS(sha384_tv_template) 4019 } 4020 }, { 4021 .alg = "sha512", 4022 .test = alg_test_hash, 4023 .fips_allowed = 1, 4024 .suite = { 4025 .hash = __VECS(sha512_tv_template) 4026 } 4027 }, { 4028 .alg = "sm3", 4029 .test = alg_test_hash, 4030 .suite = { 4031 .hash = __VECS(sm3_tv_template) 4032 } 4033 }, { 4034 .alg = "streebog256", 4035 .test = alg_test_hash, 4036 .suite = { 4037 .hash = __VECS(streebog256_tv_template) 4038 } 4039 }, { 4040 .alg = "streebog512", 4041 .test = alg_test_hash, 4042 .suite = { 4043 .hash = __VECS(streebog512_tv_template) 4044 } 4045 }, { 4046 .alg = "tgr128", 4047 .test = alg_test_hash, 4048 .suite = { 4049 .hash = __VECS(tgr128_tv_template) 4050 } 4051 }, { 4052 .alg = "tgr160", 4053 .test = alg_test_hash, 4054 .suite = { 4055 .hash = __VECS(tgr160_tv_template) 4056 } 4057 }, { 4058 .alg = "tgr192", 4059 .test = alg_test_hash, 4060 .suite = { 4061 .hash = __VECS(tgr192_tv_template) 4062 } 4063 }, { 4064 .alg = "vmac64(aes)", 4065 .test = alg_test_hash, 4066 .suite = { 4067 .hash = __VECS(vmac64_aes_tv_template) 4068 } 4069 }, { 4070 .alg = "wp256", 4071 .test = alg_test_hash, 4072 .suite = { 4073 .hash = __VECS(wp256_tv_template) 4074 } 4075 }, { 4076 .alg = "wp384", 4077 .test = alg_test_hash, 4078 .suite = { 4079 .hash = __VECS(wp384_tv_template) 4080 
} 4081 }, { 4082 .alg = "wp512", 4083 .test = alg_test_hash, 4084 .suite = { 4085 .hash = __VECS(wp512_tv_template) 4086 } 4087 }, { 4088 .alg = "xcbc(aes)", 4089 .test = alg_test_hash, 4090 .suite = { 4091 .hash = __VECS(aes_xcbc128_tv_template) 4092 } 4093 }, { 4094 .alg = "xchacha12", 4095 .test = alg_test_skcipher, 4096 .suite = { 4097 .cipher = __VECS(xchacha12_tv_template) 4098 }, 4099 }, { 4100 .alg = "xchacha20", 4101 .test = alg_test_skcipher, 4102 .suite = { 4103 .cipher = __VECS(xchacha20_tv_template) 4104 }, 4105 }, { 4106 .alg = "xts(aes)", 4107 .test = alg_test_skcipher, 4108 .fips_allowed = 1, 4109 .suite = { 4110 .cipher = __VECS(aes_xts_tv_template) 4111 } 4112 }, { 4113 .alg = "xts(camellia)", 4114 .test = alg_test_skcipher, 4115 .suite = { 4116 .cipher = __VECS(camellia_xts_tv_template) 4117 } 4118 }, { 4119 .alg = "xts(cast6)", 4120 .test = alg_test_skcipher, 4121 .suite = { 4122 .cipher = __VECS(cast6_xts_tv_template) 4123 } 4124 }, { 4125 /* Same as xts(aes) except the key is stored in 4126 * hardware secure memory which we reference by index 4127 */ 4128 .alg = "xts(paes)", 4129 .test = alg_test_null, 4130 .fips_allowed = 1, 4131 }, { 4132 .alg = "xts(serpent)", 4133 .test = alg_test_skcipher, 4134 .suite = { 4135 .cipher = __VECS(serpent_xts_tv_template) 4136 } 4137 }, { 4138 .alg = "xts(twofish)", 4139 .test = alg_test_skcipher, 4140 .suite = { 4141 .cipher = __VECS(tf_xts_tv_template) 4142 } 4143 }, { 4144 .alg = "xts4096(paes)", 4145 .test = alg_test_null, 4146 .fips_allowed = 1, 4147 }, { 4148 .alg = "xts512(paes)", 4149 .test = alg_test_null, 4150 .fips_allowed = 1, 4151 }, { 4152 .alg = "zlib-deflate", 4153 .test = alg_test_comp, 4154 .fips_allowed = 1, 4155 .suite = { 4156 .comp = { 4157 .comp = __VECS(zlib_deflate_comp_tv_template), 4158 .decomp = __VECS(zlib_deflate_decomp_tv_template) 4159 } 4160 } 4161 }, { 4162 .alg = "zstd", 4163 .test = alg_test_comp, 4164 .fips_allowed = 1, 4165 .suite = { 4166 .comp = { 4167 .comp = 
__VECS(zstd_comp_tv_template),
				.decomp = __VECS(zstd_decomp_tv_template)
			}
		}
	}
};

/*
 * Sanity-check that alg_test_descs[] is sorted in strictly ascending
 * order by .alg with no duplicates.  alg_find_test() below does a
 * binary search over this table, so an out-of-order or duplicated
 * entry would make lookups silently unreliable.  Runs once at init
 * (via testmgr_onetime_init()); violations are reported with WARN_ON
 * plus a descriptive pr_warn, but the table is not repaired.
 */
static void alg_check_test_descs_order(void)
{
	int i;

	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
		/* diff > 0: previous entry sorts after current (wrong order);
		 * diff == 0: duplicate entry.
		 */
		int diff = strcmp(alg_test_descs[i - 1].alg,
				  alg_test_descs[i].alg);

		if (WARN_ON(diff > 0)) {
			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
				alg_test_descs[i - 1].alg,
				alg_test_descs[i].alg);
		}

		if (WARN_ON(diff == 0)) {
			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
				alg_test_descs[i].alg);
		}
	}
}

/*
 * Sanity-check the built-in cipher testvec configurations.
 * default_cipher_testvec_configs[] and valid_testvec_config() are
 * defined earlier in this file (outside this chunk); presumably each
 * config describes a scatterlist/IV layout used when running test
 * vectors — confirm against their definitions.  Invalid entries only
 * trigger a WARN_ON; they are not removed.
 */
static void alg_check_testvec_configs(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
		WARN_ON(!valid_testvec_config(
				&default_cipher_testvec_configs[i]));
}

/*
 * One-time initialization for the test manager, invoked lazily from
 * alg_test() through DO_ONCE().  Performs the self-consistency checks
 * above and, when the extra (fuzz) tests are compiled in, logs a
 * warning that they are enabled.
 */
static void testmgr_onetime_init(void)
{
	alg_check_test_descs_order();
	alg_check_testvec_configs();

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	pr_warn("alg: extra crypto tests enabled.  This is intended for developer use only.\n");
#endif
}

/*
 * Binary-search alg_test_descs[] for an exact match on @alg.
 * Returns the table index on success, or -1 if no entry exists.
 * Correctness depends on the table being sorted, which
 * alg_check_test_descs_order() verifies at init time.
 */
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;

		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

/*
 * Run the self-tests for algorithm @alg as implemented by @driver.
 *
 * Returns 0 if the tests passed or no tests exist for the algorithm,
 * a negative/nonzero error otherwise.  In FIPS mode, a test failure
 * panics the kernel, and algorithms without .fips_allowed set are
 * rejected with -EINVAL; outside FIPS mode the "notests" module
 * parameter skips all testing.
 *
 * Bare CRYPTO_ALG_TYPE_CIPHER algorithms have no test entries of
 * their own: they are looked up as "ecb(<alg>)" and run through
 * alg_test_cipher() (defined earlier in this file, outside this
 * chunk).  Otherwise tests registered under both the algorithm name
 * and the driver name are run, OR-ing their results.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	/* Lazily run the table sanity checks exactly once. */
	DO_ONCE(testmgr_onetime_init);

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/* Single-block ciphers are tested via their ECB wrapper. */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* Tests may be registered under the generic algorithm name, the
	 * driver-specific name, or both; run whichever entries exist.
	 */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	/* FIPS requires a hard failure on any self-test error. */
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);