xref: /openbmc/linux/crypto/testmgr.c (revision d4fd6347)
1 /*
2  * Algorithm testing framework and tests.
3  *
4  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5  * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6  * Copyright (c) 2007 Nokia Siemens Networks
7  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8  * Copyright (c) 2019 Google LLC
9  *
10  * Updated RFC4106 AES-GCM testing.
11  *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
12  *             Adrian Hoban <adrian.hoban@intel.com>
13  *             Gabriele Paoloni <gabriele.paoloni@intel.com>
14  *             Tadeusz Struk (tadeusz.struk@intel.com)
15  *    Copyright (c) 2010, Intel Corporation.
16  *
17  * This program is free software; you can redistribute it and/or modify it
18  * under the terms of the GNU General Public License as published by the Free
19  * Software Foundation; either version 2 of the License, or (at your option)
20  * any later version.
21  *
22  */
23 
24 #include <crypto/aead.h>
25 #include <crypto/hash.h>
26 #include <crypto/skcipher.h>
27 #include <linux/err.h>
28 #include <linux/fips.h>
29 #include <linux/module.h>
30 #include <linux/once.h>
31 #include <linux/random.h>
32 #include <linux/scatterlist.h>
33 #include <linux/slab.h>
34 #include <linux/string.h>
35 #include <crypto/rng.h>
36 #include <crypto/drbg.h>
37 #include <crypto/akcipher.h>
38 #include <crypto/kpp.h>
39 #include <crypto/acompress.h>
40 #include <crypto/internal/simd.h>
41 
42 #include "internal.h"
43 
44 static bool notests;
45 module_param(notests, bool, 0644);
46 MODULE_PARM_DESC(notests, "disable crypto self-tests");
47 
48 static bool panic_on_fail;
49 module_param(panic_on_fail, bool, 0444);
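/*
 * testmgr.c is built into the cryptomgr module, so on the kernel command
 * line these parameters take a "cryptomgr." prefix, e.g. cryptomgr.notests=1
 * or cryptomgr.panic_on_fail=1.
 */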
50 
51 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
52 static bool noextratests;
53 module_param(noextratests, bool, 0644);
54 MODULE_PARM_DESC(noextratests, "disable expensive crypto self-tests");
55 
56 static unsigned int fuzz_iterations = 100;
57 module_param(fuzz_iterations, uint, 0644);
58 MODULE_PARM_DESC(fuzz_iterations, "number of fuzz test iterations");
59 
60 DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
61 EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
62 #endif
63 
64 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
65 
66 /* a perfect nop */
67 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
68 {
69 	return 0;
70 }
71 
72 #else
73 
74 #include "testmgr.h"
75 
76 /*
77  * Number of test buffers to allocate; each buffer is one or more whole pages.
78  */
79 #define XBUFSIZE	8
80 
81 /*
82  * Used by test_cipher()
83  */
84 #define ENCRYPT 1
85 #define DECRYPT 0
86 
87 struct aead_test_suite {
88 	const struct aead_testvec *vecs;
89 	unsigned int count;
90 };
91 
92 struct cipher_test_suite {
93 	const struct cipher_testvec *vecs;
94 	unsigned int count;
95 };
96 
97 struct comp_test_suite {
98 	struct {
99 		const struct comp_testvec *vecs;
100 		unsigned int count;
101 	} comp, decomp;
102 };
103 
104 struct hash_test_suite {
105 	const struct hash_testvec *vecs;
106 	unsigned int count;
107 };
108 
109 struct cprng_test_suite {
110 	const struct cprng_testvec *vecs;
111 	unsigned int count;
112 };
113 
114 struct drbg_test_suite {
115 	const struct drbg_testvec *vecs;
116 	unsigned int count;
117 };
118 
119 struct akcipher_test_suite {
120 	const struct akcipher_testvec *vecs;
121 	unsigned int count;
122 };
123 
124 struct kpp_test_suite {
125 	const struct kpp_testvec *vecs;
126 	unsigned int count;
127 };
128 
129 struct alg_test_desc {
130 	const char *alg;
131 	const char *generic_driver;
132 	int (*test)(const struct alg_test_desc *desc, const char *driver,
133 		    u32 type, u32 mask);
134 	int fips_allowed;	/* set if alg is allowed in fips mode */
135 
136 	union {
137 		struct aead_test_suite aead;
138 		struct cipher_test_suite cipher;
139 		struct comp_test_suite comp;
140 		struct hash_test_suite hash;
141 		struct cprng_test_suite cprng;
142 		struct drbg_test_suite drbg;
143 		struct akcipher_test_suite akcipher;
144 		struct kpp_test_suite kpp;
145 	} suite;
146 };
147 
148 static void hexdump(unsigned char *buf, unsigned int len)
149 {
150 	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
151 			16, 1,
152 			buf, len, false);
153 }
154 
155 static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
156 {
157 	int i;
158 
159 	for (i = 0; i < XBUFSIZE; i++) {
160 		buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);
161 		if (!buf[i])
162 			goto err_free_buf;
163 	}
164 
165 	return 0;
166 
167 err_free_buf:
168 	while (i-- > 0)
169 		free_pages((unsigned long)buf[i], order);
170 
171 	return -ENOMEM;
172 }
173 
174 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
175 {
176 	return __testmgr_alloc_buf(buf, 0);
177 }
178 
179 static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
180 {
181 	int i;
182 
183 	for (i = 0; i < XBUFSIZE; i++)
184 		free_pages((unsigned long)buf[i], order);
185 }
186 
187 static void testmgr_free_buf(char *buf[XBUFSIZE])
188 {
189 	__testmgr_free_buf(buf, 0);
190 }
191 
192 #define TESTMGR_POISON_BYTE	0xfe
193 #define TESTMGR_POISON_LEN	16
194 
195 static inline void testmgr_poison(void *addr, size_t len)
196 {
197 	memset(addr, TESTMGR_POISON_BYTE, len);
198 }
199 
200 /* Is the memory region still fully poisoned? */
201 static inline bool testmgr_is_poison(const void *addr, size_t len)
202 {
203 	return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
204 }
205 
206 /* flush type for hash algorithms */
207 enum flush_type {
208 	/* merge with update of previous buffer(s) */
209 	FLUSH_TYPE_NONE = 0,
210 
211 	/* update with previous buffer(s) before doing this one */
212 	FLUSH_TYPE_FLUSH,
213 
214 	/* likewise, but also export and re-import the intermediate state */
215 	FLUSH_TYPE_REIMPORT,
216 };
217 
218 /* finalization function for hash algorithms */
219 enum finalization_type {
220 	FINALIZATION_TYPE_FINAL,	/* use final() */
221 	FINALIZATION_TYPE_FINUP,	/* use finup() */
222 	FINALIZATION_TYPE_DIGEST,	/* use digest() */
223 };
224 
225 #define TEST_SG_TOTAL	10000
226 
227 /**
228  * struct test_sg_division - description of a scatterlist entry
229  *
230  * This struct describes one entry of a scatterlist being constructed to check a
231  * crypto test vector.
232  *
233  * @proportion_of_total: length of this chunk relative to the total length,
234  *			 given as a proportion out of TEST_SG_TOTAL so that it
235  *			 scales to fit any test vector
236  * @offset: byte offset into a 2-page buffer at which this chunk will start
237  * @offset_relative_to_alignmask: if true, add the algorithm's alignmask to the
238  *				  @offset
239  * @flush_type: for hashes, whether an update() should be done now vs.
240  *		continuing to accumulate data
241  * @nosimd: if doing the pending update(), do it with SIMD disabled?
242  */
243 struct test_sg_division {
244 	unsigned int proportion_of_total;
245 	unsigned int offset;
246 	bool offset_relative_to_alignmask;
247 	enum flush_type flush_type;
248 	bool nosimd;
249 };
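/*
 * Example: { .proportion_of_total = 2500, .offset = 3 } describes a chunk
 * covering 2500/10000 = 25% of the test vector's total length (rounded),
 * starting 3 bytes into its two-page buffer.
 */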
250 
251 /**
252  * struct testvec_config - configuration for testing a crypto test vector
253  *
254  * This struct describes the data layout and other parameters with which each
255  * crypto test vector can be tested.
256  *
257  * @name: name of this config, logged for debugging purposes if a test fails
258  * @inplace: operate on the data in-place, if applicable for the algorithm type?
259  * @req_flags: extra request_flags, e.g. CRYPTO_TFM_REQ_MAY_SLEEP
260  * @src_divs: description of how to arrange the source scatterlist
261  * @dst_divs: description of how to arrange the dst scatterlist, if applicable
262  *	      for the algorithm type.  Defaults to @src_divs if unset.
263  * @iv_offset: misalignment of the IV in the range [0..MAX_ALGAPI_ALIGNMASK+1],
264  *	       where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
265  * @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
266  *				     the @iv_offset
267  * @finalization_type: what finalization function to use for hashes
268  * @nosimd: execute with SIMD disabled?  Requires !CRYPTO_TFM_REQ_MAY_SLEEP.
269  */
270 struct testvec_config {
271 	const char *name;
272 	bool inplace;
273 	u32 req_flags;
274 	struct test_sg_division src_divs[XBUFSIZE];
275 	struct test_sg_division dst_divs[XBUFSIZE];
276 	unsigned int iv_offset;
277 	bool iv_offset_relative_to_alignmask;
278 	enum finalization_type finalization_type;
279 	bool nosimd;
280 };
281 
282 #define TESTVEC_CONFIG_NAMELEN	192
283 
284 /*
285  * The following are the lists of testvec_configs to test for each algorithm
286  * type when the basic crypto self-tests are enabled, i.e. when
287  * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is unset.  They aim to provide good test
288  * coverage, while keeping the test time much shorter than the full fuzz tests
289  * so that the basic tests can be enabled in a wider range of circumstances.
290  */
291 
292 /* Configs for skciphers and aeads */
293 static const struct testvec_config default_cipher_testvec_configs[] = {
294 	{
295 		.name = "in-place",
296 		.inplace = true,
297 		.src_divs = { { .proportion_of_total = 10000 } },
298 	}, {
299 		.name = "out-of-place",
300 		.src_divs = { { .proportion_of_total = 10000 } },
301 	}, {
302 		.name = "unaligned buffer, offset=1",
303 		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
304 		.iv_offset = 1,
305 	}, {
306 		.name = "buffer aligned only to alignmask",
307 		.src_divs = {
308 			{
309 				.proportion_of_total = 10000,
310 				.offset = 1,
311 				.offset_relative_to_alignmask = true,
312 			},
313 		},
314 		.iv_offset = 1,
315 		.iv_offset_relative_to_alignmask = true,
316 	}, {
317 		.name = "two even aligned splits",
318 		.src_divs = {
319 			{ .proportion_of_total = 5000 },
320 			{ .proportion_of_total = 5000 },
321 		},
322 	}, {
323 		.name = "uneven misaligned splits, may sleep",
324 		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
325 		.src_divs = {
326 			{ .proportion_of_total = 1900, .offset = 33 },
327 			{ .proportion_of_total = 3300, .offset = 7  },
328 			{ .proportion_of_total = 4800, .offset = 18 },
329 		},
330 		.iv_offset = 3,
331 	}, {
332 		.name = "misaligned splits crossing pages, inplace",
333 		.inplace = true,
334 		.src_divs = {
335 			{
336 				.proportion_of_total = 7500,
337 				.offset = PAGE_SIZE - 32
338 			}, {
339 				.proportion_of_total = 2500,
340 				.offset = PAGE_SIZE - 7
341 			},
342 		},
343 	}
344 };
345 
346 static const struct testvec_config default_hash_testvec_configs[] = {
347 	{
348 		.name = "init+update+final aligned buffer",
349 		.src_divs = { { .proportion_of_total = 10000 } },
350 		.finalization_type = FINALIZATION_TYPE_FINAL,
351 	}, {
352 		.name = "init+finup aligned buffer",
353 		.src_divs = { { .proportion_of_total = 10000 } },
354 		.finalization_type = FINALIZATION_TYPE_FINUP,
355 	}, {
356 		.name = "digest aligned buffer",
357 		.src_divs = { { .proportion_of_total = 10000 } },
358 		.finalization_type = FINALIZATION_TYPE_DIGEST,
359 	}, {
360 		.name = "init+update+final misaligned buffer",
361 		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
362 		.finalization_type = FINALIZATION_TYPE_FINAL,
363 	}, {
364 		.name = "digest buffer aligned only to alignmask",
365 		.src_divs = {
366 			{
367 				.proportion_of_total = 10000,
368 				.offset = 1,
369 				.offset_relative_to_alignmask = true,
370 			},
371 		},
372 		.finalization_type = FINALIZATION_TYPE_DIGEST,
373 	}, {
374 		.name = "init+update+update+final two even splits",
375 		.src_divs = {
376 			{ .proportion_of_total = 5000 },
377 			{
378 				.proportion_of_total = 5000,
379 				.flush_type = FLUSH_TYPE_FLUSH,
380 			},
381 		},
382 		.finalization_type = FINALIZATION_TYPE_FINAL,
383 	}, {
384 		.name = "digest uneven misaligned splits, may sleep",
385 		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
386 		.src_divs = {
387 			{ .proportion_of_total = 1900, .offset = 33 },
388 			{ .proportion_of_total = 3300, .offset = 7  },
389 			{ .proportion_of_total = 4800, .offset = 18 },
390 		},
391 		.finalization_type = FINALIZATION_TYPE_DIGEST,
392 	}, {
393 		.name = "digest misaligned splits crossing pages",
394 		.src_divs = {
395 			{
396 				.proportion_of_total = 7500,
397 				.offset = PAGE_SIZE - 32,
398 			}, {
399 				.proportion_of_total = 2500,
400 				.offset = PAGE_SIZE - 7,
401 			},
402 		},
403 		.finalization_type = FINALIZATION_TYPE_DIGEST,
404 	}, {
405 		.name = "import/export",
406 		.src_divs = {
407 			{
408 				.proportion_of_total = 6500,
409 				.flush_type = FLUSH_TYPE_REIMPORT,
410 			}, {
411 				.proportion_of_total = 3500,
412 				.flush_type = FLUSH_TYPE_REIMPORT,
413 			},
414 		},
415 		.finalization_type = FINALIZATION_TYPE_FINAL,
416 	}
417 };
418 
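/*
 * Count the entries in @divs, relying on the proportions of a valid array
 * summing to exactly TEST_SG_TOTAL.
 */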
419 static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
420 {
421 	unsigned int remaining = TEST_SG_TOTAL;
422 	unsigned int ndivs = 0;
423 
424 	do {
425 		remaining -= divs[ndivs++].proportion_of_total;
426 	} while (remaining);
427 
428 	return ndivs;
429 }
430 
431 #define SGDIVS_HAVE_FLUSHES	BIT(0)
432 #define SGDIVS_HAVE_NOSIMD	BIT(1)
433 
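/*
 * Check that the proportions in @divs are positive and sum to exactly
 * TEST_SG_TOTAL, and that every entry past the last used one is zeroed.
 * Also report via *flags_ret whether any division uses a flush or a nosimd
 * update.
 */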
434 static bool valid_sg_divisions(const struct test_sg_division *divs,
435 			       unsigned int count, int *flags_ret)
436 {
437 	unsigned int total = 0;
438 	unsigned int i;
439 
440 	for (i = 0; i < count && total != TEST_SG_TOTAL; i++) {
441 		if (divs[i].proportion_of_total <= 0 ||
442 		    divs[i].proportion_of_total > TEST_SG_TOTAL - total)
443 			return false;
444 		total += divs[i].proportion_of_total;
445 		if (divs[i].flush_type != FLUSH_TYPE_NONE)
446 			*flags_ret |= SGDIVS_HAVE_FLUSHES;
447 		if (divs[i].nosimd)
448 			*flags_ret |= SGDIVS_HAVE_NOSIMD;
449 	}
450 	return total == TEST_SG_TOTAL &&
451 		memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
452 }
453 
454 /*
455  * Check whether the given testvec_config is valid.  This isn't strictly needed
456  * since every testvec_config should be valid, but check anyway so that people
457  * don't unknowingly add broken configs that don't do what they wanted.
458  */
459 static bool valid_testvec_config(const struct testvec_config *cfg)
460 {
461 	int flags = 0;
462 
463 	if (cfg->name == NULL)
464 		return false;
465 
466 	if (!valid_sg_divisions(cfg->src_divs, ARRAY_SIZE(cfg->src_divs),
467 				&flags))
468 		return false;
469 
470 	if (cfg->dst_divs[0].proportion_of_total) {
471 		if (!valid_sg_divisions(cfg->dst_divs,
472 					ARRAY_SIZE(cfg->dst_divs), &flags))
473 			return false;
474 	} else {
475 		if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
476 			return false;
477 		/* defaults to dst_divs=src_divs */
478 	}
479 
480 	if (cfg->iv_offset +
481 	    (cfg->iv_offset_relative_to_alignmask ? MAX_ALGAPI_ALIGNMASK : 0) >
482 	    MAX_ALGAPI_ALIGNMASK + 1)
483 		return false;
484 
485 	if ((flags & (SGDIVS_HAVE_FLUSHES | SGDIVS_HAVE_NOSIMD)) &&
486 	    cfg->finalization_type == FINALIZATION_TYPE_DIGEST)
487 		return false;
488 
489 	if ((cfg->nosimd || (flags & SGDIVS_HAVE_NOSIMD)) &&
490 	    (cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP))
491 		return false;
492 
493 	return true;
494 }
495 
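/*
 * A scatterlist for testing: @bufs are the backing two-page buffers, @sgl is
 * the scatterlist built over them, @sgl_saved is a copy used to detect
 * corruption of the entries, and @sgl_ptr is what is actually passed to the
 * algorithm (for in-place tests the dst's @sgl_ptr aliases the src's @sgl).
 */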
496 struct test_sglist {
497 	char *bufs[XBUFSIZE];
498 	struct scatterlist sgl[XBUFSIZE];
499 	struct scatterlist sgl_saved[XBUFSIZE];
500 	struct scatterlist *sgl_ptr;
501 	unsigned int nents;
502 };
503 
504 static int init_test_sglist(struct test_sglist *tsgl)
505 {
506 	return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);
507 }
508 
509 static void destroy_test_sglist(struct test_sglist *tsgl)
510 {
511 	return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);
512 }
513 
514 /**
515  * build_test_sglist() - build a scatterlist for a crypto test
516  *
517  * @tsgl: the scatterlist to build.  @tsgl->bufs[] contains an array of 2-page
518  *	  buffers which the scatterlist @tsgl->sgl[] will be made to point into.
519  * @divs: the layout specification on which the scatterlist will be based
520  * @alignmask: the algorithm's alignmask
521  * @total_len: the total length of the scatterlist to build in bytes
522  * @data: if non-NULL, the buffers will be filled with this data until it ends.
523  *	  Otherwise the buffers will be poisoned.  In both cases, some bytes
524  *	  past the end of each buffer will be poisoned to help detect overruns.
525  * @out_divs: if non-NULL, the test_sg_division to which each scatterlist entry
526  *	      corresponds will be returned here.  This will match @divs except
527  *	      that divisions resolving to a length of 0 are omitted as they are
528  *	      not included in the scatterlist.
529  *
530  * Return: 0 or a -errno value
531  */
532 static int build_test_sglist(struct test_sglist *tsgl,
533 			     const struct test_sg_division *divs,
534 			     const unsigned int alignmask,
535 			     const unsigned int total_len,
536 			     struct iov_iter *data,
537 			     const struct test_sg_division *out_divs[XBUFSIZE])
538 {
539 	struct {
540 		const struct test_sg_division *div;
541 		size_t length;
542 	} partitions[XBUFSIZE];
543 	const unsigned int ndivs = count_test_sg_divisions(divs);
544 	unsigned int len_remaining = total_len;
545 	unsigned int i;
546 
547 	BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
548 	if (WARN_ON(ndivs > ARRAY_SIZE(partitions)))
549 		return -EINVAL;
550 
551 	/* Calculate the (div, length) pairs */
552 	tsgl->nents = 0;
553 	for (i = 0; i < ndivs; i++) {
554 		unsigned int len_this_sg =
555 			min(len_remaining,
556 			    (total_len * divs[i].proportion_of_total +
557 			     TEST_SG_TOTAL / 2) / TEST_SG_TOTAL);
558 
559 		if (len_this_sg != 0) {
560 			partitions[tsgl->nents].div = &divs[i];
561 			partitions[tsgl->nents].length = len_this_sg;
562 			tsgl->nents++;
563 			len_remaining -= len_this_sg;
564 		}
565 	}
566 	if (tsgl->nents == 0) {
567 		partitions[tsgl->nents].div = &divs[0];
568 		partitions[tsgl->nents].length = 0;
569 		tsgl->nents++;
570 	}
571 	partitions[tsgl->nents - 1].length += len_remaining;
572 
573 	/* Set up the sgl entries and fill the data or poison */
574 	sg_init_table(tsgl->sgl, tsgl->nents);
575 	for (i = 0; i < tsgl->nents; i++) {
576 		unsigned int offset = partitions[i].div->offset;
577 		void *addr;
578 
579 		if (partitions[i].div->offset_relative_to_alignmask)
580 			offset += alignmask;
581 
582 		while (offset + partitions[i].length + TESTMGR_POISON_LEN >
583 		       2 * PAGE_SIZE) {
584 			if (WARN_ON(offset <= 0))
585 				return -EINVAL;
586 			offset /= 2;
587 		}
588 
589 		addr = &tsgl->bufs[i][offset];
590 		sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);
591 
592 		if (out_divs)
593 			out_divs[i] = partitions[i].div;
594 
595 		if (data) {
596 			size_t copy_len, copied;
597 
598 			copy_len = min(partitions[i].length, data->count);
599 			copied = copy_from_iter(addr, copy_len, data);
600 			if (WARN_ON(copied != copy_len))
601 				return -EINVAL;
602 			testmgr_poison(addr + copy_len, partitions[i].length +
603 				       TESTMGR_POISON_LEN - copy_len);
604 		} else {
605 			testmgr_poison(addr, partitions[i].length +
606 				       TESTMGR_POISON_LEN);
607 		}
608 	}
609 
610 	sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
611 	tsgl->sgl_ptr = tsgl->sgl;
612 	memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
613 	return 0;
614 }
615 
616 /*
617  * Verify that a scatterlist crypto operation produced the correct output.
618  *
619  * @tsgl: scatterlist containing the actual output
620  * @expected_output: buffer containing the expected output
621  * @len_to_check: length of @expected_output in bytes
622  * @unchecked_prefix_len: number of ignored bytes in @tsgl prior to real result
623  * @check_poison: verify that the poison bytes after each chunk are intact?
624  *
625  * Return: 0 if correct, -EINVAL if incorrect, -EOVERFLOW if buffer overrun.
626  */
627 static int verify_correct_output(const struct test_sglist *tsgl,
628 				 const char *expected_output,
629 				 unsigned int len_to_check,
630 				 unsigned int unchecked_prefix_len,
631 				 bool check_poison)
632 {
633 	unsigned int i;
634 
635 	for (i = 0; i < tsgl->nents; i++) {
636 		struct scatterlist *sg = &tsgl->sgl_ptr[i];
637 		unsigned int len = sg->length;
638 		unsigned int offset = sg->offset;
639 		const char *actual_output;
640 
641 		if (unchecked_prefix_len) {
642 			if (unchecked_prefix_len >= len) {
643 				unchecked_prefix_len -= len;
644 				continue;
645 			}
646 			offset += unchecked_prefix_len;
647 			len -= unchecked_prefix_len;
648 			unchecked_prefix_len = 0;
649 		}
650 		len = min(len, len_to_check);
651 		actual_output = page_address(sg_page(sg)) + offset;
652 		if (memcmp(expected_output, actual_output, len) != 0)
653 			return -EINVAL;
654 		if (check_poison &&
655 		    !testmgr_is_poison(actual_output + len, TESTMGR_POISON_LEN))
656 			return -EOVERFLOW;
657 		len_to_check -= len;
658 		expected_output += len;
659 	}
660 	if (WARN_ON(len_to_check != 0))
661 		return -EINVAL;
662 	return 0;
663 }
664 
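/*
 * Check whether the algorithm modified the scatterlist entries themselves;
 * it may only write to the memory the entries point to.
 */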
665 static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)
666 {
667 	unsigned int i;
668 
669 	for (i = 0; i < tsgl->nents; i++) {
670 		if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
671 			return true;
672 		if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
673 			return true;
674 		if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)
675 			return true;
676 	}
677 	return false;
678 }
679 
680 struct cipher_test_sglists {
681 	struct test_sglist src;
682 	struct test_sglist dst;
683 };
684 
685 static struct cipher_test_sglists *alloc_cipher_test_sglists(void)
686 {
687 	struct cipher_test_sglists *tsgls;
688 
689 	tsgls = kmalloc(sizeof(*tsgls), GFP_KERNEL);
690 	if (!tsgls)
691 		return NULL;
692 
693 	if (init_test_sglist(&tsgls->src) != 0)
694 		goto fail_kfree;
695 	if (init_test_sglist(&tsgls->dst) != 0)
696 		goto fail_destroy_src;
697 
698 	return tsgls;
699 
700 fail_destroy_src:
701 	destroy_test_sglist(&tsgls->src);
702 fail_kfree:
703 	kfree(tsgls);
704 	return NULL;
705 }
706 
707 static void free_cipher_test_sglists(struct cipher_test_sglists *tsgls)
708 {
709 	if (tsgls) {
710 		destroy_test_sglist(&tsgls->src);
711 		destroy_test_sglist(&tsgls->dst);
712 		kfree(tsgls);
713 	}
714 }
715 
716 /* Build the src and dst scatterlists for an skcipher or AEAD test */
717 static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
718 				     const struct testvec_config *cfg,
719 				     unsigned int alignmask,
720 				     unsigned int src_total_len,
721 				     unsigned int dst_total_len,
722 				     const struct kvec *inputs,
723 				     unsigned int nr_inputs)
724 {
725 	struct iov_iter input;
726 	int err;
727 
728 	iov_iter_kvec(&input, WRITE, inputs, nr_inputs, src_total_len);
729 	err = build_test_sglist(&tsgls->src, cfg->src_divs, alignmask,
730 				cfg->inplace ?
731 					max(dst_total_len, src_total_len) :
732 					src_total_len,
733 				&input, NULL);
734 	if (err)
735 		return err;
736 
737 	if (cfg->inplace) {
738 		tsgls->dst.sgl_ptr = tsgls->src.sgl;
739 		tsgls->dst.nents = tsgls->src.nents;
740 		return 0;
741 	}
742 	return build_test_sglist(&tsgls->dst,
743 				 cfg->dst_divs[0].proportion_of_total ?
744 					cfg->dst_divs : cfg->src_divs,
745 				 alignmask, dst_total_len, NULL, NULL);
746 }
747 
748 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
749 
750 /* Generate a random length in range [0, max_len], but prefer smaller values */
751 static unsigned int generate_random_length(unsigned int max_len)
752 {
753 	unsigned int len = prandom_u32() % (max_len + 1);
754 
755 	switch (prandom_u32() % 4) {
756 	case 0:
757 		return len % 64;
758 	case 1:
759 		return len % 256;
760 	case 2:
761 		return len % 1024;
762 	default:
763 		return len;
764 	}
765 }
766 
767 /* Sometimes make some random changes to the given data buffer */
768 static void mutate_buffer(u8 *buf, size_t count)
769 {
770 	size_t num_flips;
771 	size_t i;
772 	size_t pos;
773 
774 	/* Sometimes flip some bits */
775 	if (prandom_u32() % 4 == 0) {
776 		num_flips = min_t(size_t, 1 << (prandom_u32() % 8), count * 8);
777 		for (i = 0; i < num_flips; i++) {
778 			pos = prandom_u32() % (count * 8);
779 			buf[pos / 8] ^= 1 << (pos % 8);
780 		}
781 	}
782 
783 	/* Sometimes flip some bytes */
784 	if (prandom_u32() % 4 == 0) {
785 		num_flips = min_t(size_t, 1 << (prandom_u32() % 8), count);
786 		for (i = 0; i < num_flips; i++)
787 			buf[prandom_u32() % count] ^= 0xff;
788 	}
789 }
790 
791 /* Randomly generate 'count' bytes, but sometimes make them "interesting" */
792 static void generate_random_bytes(u8 *buf, size_t count)
793 {
794 	u8 b;
795 	u8 increment;
796 	size_t i;
797 
798 	if (count == 0)
799 		return;
800 
801 	switch (prandom_u32() % 8) { /* Choose a generation strategy */
802 	case 0:
803 	case 1:
804 		/* All the same byte, plus optional mutations */
805 		switch (prandom_u32() % 4) {
806 		case 0:
807 			b = 0x00;
808 			break;
809 		case 1:
810 			b = 0xff;
811 			break;
812 		default:
813 			b = (u8)prandom_u32();
814 			break;
815 		}
816 		memset(buf, b, count);
817 		mutate_buffer(buf, count);
818 		break;
819 	case 2:
820 		/* Ascending or descending bytes, plus optional mutations */
821 		increment = (u8)prandom_u32();
822 		b = (u8)prandom_u32();
823 		for (i = 0; i < count; i++, b += increment)
824 			buf[i] = b;
825 		mutate_buffer(buf, count);
826 		break;
827 	default:
828 		/* Fully random bytes */
829 		for (i = 0; i < count; i++)
830 			buf[i] = (u8)prandom_u32();
831 	}
832 }
833 
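/*
 * Randomly split TEST_SG_TOTAL into scatterlist divisions and append a
 * human-readable description of them to the config name at @p, returning
 * the updated position.
 */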
834 static char *generate_random_sgl_divisions(struct test_sg_division *divs,
835 					   size_t max_divs, char *p, char *end,
836 					   bool gen_flushes, u32 req_flags)
837 {
838 	struct test_sg_division *div = divs;
839 	unsigned int remaining = TEST_SG_TOTAL;
840 
841 	do {
842 		unsigned int this_len;
843 		const char *flushtype_str;
844 
845 		if (div == &divs[max_divs - 1] || prandom_u32() % 2 == 0)
846 			this_len = remaining;
847 		else
848 			this_len = 1 + (prandom_u32() % remaining);
849 		div->proportion_of_total = this_len;
850 
851 		if (prandom_u32() % 4 == 0)
852 			div->offset = (PAGE_SIZE - 128) + (prandom_u32() % 128);
853 		else if (prandom_u32() % 2 == 0)
854 			div->offset = prandom_u32() % 32;
855 		else
856 			div->offset = prandom_u32() % PAGE_SIZE;
857 		if (prandom_u32() % 8 == 0)
858 			div->offset_relative_to_alignmask = true;
859 
860 		div->flush_type = FLUSH_TYPE_NONE;
861 		if (gen_flushes) {
862 			switch (prandom_u32() % 4) {
863 			case 0:
864 				div->flush_type = FLUSH_TYPE_REIMPORT;
865 				break;
866 			case 1:
867 				div->flush_type = FLUSH_TYPE_FLUSH;
868 				break;
869 			}
870 		}
871 
872 		if (div->flush_type != FLUSH_TYPE_NONE &&
873 		    !(req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) &&
874 		    prandom_u32() % 2 == 0)
875 			div->nosimd = true;
876 
877 		switch (div->flush_type) {
878 		case FLUSH_TYPE_FLUSH:
879 			if (div->nosimd)
880 				flushtype_str = "<flush,nosimd>";
881 			else
882 				flushtype_str = "<flush>";
883 			break;
884 		case FLUSH_TYPE_REIMPORT:
885 			if (div->nosimd)
886 				flushtype_str = "<reimport,nosimd>";
887 			else
888 				flushtype_str = "<reimport>";
889 			break;
890 		default:
891 			flushtype_str = "";
892 			break;
893 		}
894 
895 		BUILD_BUG_ON(TEST_SG_TOTAL != 10000); /* for "%u.%u%%" */
896 		p += scnprintf(p, end - p, "%s%u.%u%%@%s+%u%s", flushtype_str,
897 			       this_len / 100, this_len % 100,
898 			       div->offset_relative_to_alignmask ?
899 					"alignmask" : "",
900 			       div->offset, this_len == remaining ? "" : ", ");
901 		remaining -= this_len;
902 		div++;
903 	} while (remaining);
904 
905 	return p;
906 }
907 
908 /* Generate a random testvec_config for fuzz testing */
909 static void generate_random_testvec_config(struct testvec_config *cfg,
910 					   char *name, size_t max_namelen)
911 {
912 	char *p = name;
913 	char * const end = name + max_namelen;
914 
915 	memset(cfg, 0, sizeof(*cfg));
916 
917 	cfg->name = name;
918 
919 	p += scnprintf(p, end - p, "random:");
920 
921 	if (prandom_u32() % 2 == 0) {
922 		cfg->inplace = true;
923 		p += scnprintf(p, end - p, " inplace");
924 	}
925 
926 	if (prandom_u32() % 2 == 0) {
927 		cfg->req_flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
928 		p += scnprintf(p, end - p, " may_sleep");
929 	}
930 
931 	switch (prandom_u32() % 4) {
932 	case 0:
933 		cfg->finalization_type = FINALIZATION_TYPE_FINAL;
934 		p += scnprintf(p, end - p, " use_final");
935 		break;
936 	case 1:
937 		cfg->finalization_type = FINALIZATION_TYPE_FINUP;
938 		p += scnprintf(p, end - p, " use_finup");
939 		break;
940 	default:
941 		cfg->finalization_type = FINALIZATION_TYPE_DIGEST;
942 		p += scnprintf(p, end - p, " use_digest");
943 		break;
944 	}
945 
946 	if (!(cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) &&
947 	    prandom_u32() % 2 == 0) {
948 		cfg->nosimd = true;
949 		p += scnprintf(p, end - p, " nosimd");
950 	}
951 
952 	p += scnprintf(p, end - p, " src_divs=[");
953 	p = generate_random_sgl_divisions(cfg->src_divs,
954 					  ARRAY_SIZE(cfg->src_divs), p, end,
955 					  (cfg->finalization_type !=
956 					   FINALIZATION_TYPE_DIGEST),
957 					  cfg->req_flags);
958 	p += scnprintf(p, end - p, "]");
959 
960 	if (!cfg->inplace && prandom_u32() % 2 == 0) {
961 		p += scnprintf(p, end - p, " dst_divs=[");
962 		p = generate_random_sgl_divisions(cfg->dst_divs,
963 						  ARRAY_SIZE(cfg->dst_divs),
964 						  p, end, false,
965 						  cfg->req_flags);
966 		p += scnprintf(p, end - p, "]");
967 	}
968 
969 	if (prandom_u32() % 2 == 0) {
970 		cfg->iv_offset = 1 + (prandom_u32() % MAX_ALGAPI_ALIGNMASK);
971 		p += scnprintf(p, end - p, " iv_offset=%u", cfg->iv_offset);
972 	}
973 
974 	WARN_ON_ONCE(!valid_testvec_config(cfg));
975 }
976 
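/*
 * Force crypto_simd_usable() to return false on this CPU so that an
 * algorithm's no-SIMD fallback paths get exercised.  Preemption is disabled
 * so that the test stays on the CPU whose per-CPU flag was set.
 */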
977 static void crypto_disable_simd_for_test(void)
978 {
979 	preempt_disable();
980 	__this_cpu_write(crypto_simd_disabled_for_test, true);
981 }
982 
983 static void crypto_reenable_simd_for_test(void)
984 {
985 	__this_cpu_write(crypto_simd_disabled_for_test, false);
986 	preempt_enable();
987 }
988 
989 /*
990  * Given an algorithm name, build the name of the generic implementation of that
991  * algorithm, assuming the usual naming convention.  Specifically, this appends
992  * "-generic" to every part of the name that is not a template name.  Examples:
993  *
994  *	aes => aes-generic
995  *	cbc(aes) => cbc(aes-generic)
996  *	cts(cbc(aes)) => cts(cbc(aes-generic))
997  *	rfc7539(chacha20,poly1305) => rfc7539(chacha20-generic,poly1305-generic)
998  *
999  * Return: 0 on success, or -ENAMETOOLONG if the generic name would be too long
1000  */
1001 static int build_generic_driver_name(const char *algname,
1002 				     char driver_name[CRYPTO_MAX_ALG_NAME])
1003 {
1004 	const char *in = algname;
1005 	char *out = driver_name;
1006 	size_t len = strlen(algname);
1007 
1008 	if (len >= CRYPTO_MAX_ALG_NAME)
1009 		goto too_long;
1010 	do {
1011 		const char *in_saved = in;
1012 
1013 		while (*in && *in != '(' && *in != ')' && *in != ',')
1014 			*out++ = *in++;
1015 		if (*in != '(' && in > in_saved) {
1016 			len += 8;
1017 			if (len >= CRYPTO_MAX_ALG_NAME)
1018 				goto too_long;
1019 			memcpy(out, "-generic", 8);
1020 			out += 8;
1021 		}
1022 	} while ((*out++ = *in++) != '\0');
1023 	return 0;
1024 
1025 too_long:
1026 	pr_err("alg: generic driver name for \"%s\" would be too long\n",
1027 	       algname);
1028 	return -ENAMETOOLONG;
1029 }
1030 #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1031 static void crypto_disable_simd_for_test(void)
1032 {
1033 }
1034 
1035 static void crypto_reenable_simd_for_test(void)
1036 {
1037 }
1038 #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1039 
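/*
 * Run one hash operation, optionally with SIMD disabled around it, then wait
 * for completion if it returned -EINPROGRESS or -EBUSY.
 */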
1040 static int do_ahash_op(int (*op)(struct ahash_request *req),
1041 		       struct ahash_request *req,
1042 		       struct crypto_wait *wait, bool nosimd)
1043 {
1044 	int err;
1045 
1046 	if (nosimd)
1047 		crypto_disable_simd_for_test();
1048 
1049 	err = op(req);
1050 
1051 	if (nosimd)
1052 		crypto_reenable_simd_for_test();
1053 
1054 	return crypto_wait_req(err, wait);
1055 }
1056 
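/*
 * Check a non-final hash step: the operation must have succeeded and must
 * not have written to the result buffer yet (it must still be poisoned).
 */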
1057 static int check_nonfinal_hash_op(const char *op, int err,
1058 				  u8 *result, unsigned int digestsize,
1059 				  const char *driver, const char *vec_name,
1060 				  const struct testvec_config *cfg)
1061 {
1062 	if (err) {
1063 		pr_err("alg: hash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
1064 		       driver, op, err, vec_name, cfg->name);
1065 		return err;
1066 	}
1067 	if (!testmgr_is_poison(result, digestsize)) {
1068 		pr_err("alg: hash: %s %s() used result buffer on test vector %s, cfg=\"%s\"\n",
1069 		       driver, op, vec_name, cfg->name);
1070 		return -EINVAL;
1071 	}
1072 	return 0;
1073 }
1074 
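/*
 * Test one hash test vector with one testvec_config: hash the plaintext
 * using the requested data layout and finalization method, then verify the
 * digest and check for overruns of the result and state buffers.
 */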
1075 static int test_hash_vec_cfg(const char *driver,
1076 			     const struct hash_testvec *vec,
1077 			     const char *vec_name,
1078 			     const struct testvec_config *cfg,
1079 			     struct ahash_request *req,
1080 			     struct test_sglist *tsgl,
1081 			     u8 *hashstate)
1082 {
1083 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1084 	const unsigned int alignmask = crypto_ahash_alignmask(tfm);
1085 	const unsigned int digestsize = crypto_ahash_digestsize(tfm);
1086 	const unsigned int statesize = crypto_ahash_statesize(tfm);
1087 	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
1088 	const struct test_sg_division *divs[XBUFSIZE];
1089 	DECLARE_CRYPTO_WAIT(wait);
1090 	struct kvec _input;
1091 	struct iov_iter input;
1092 	unsigned int i;
1093 	struct scatterlist *pending_sgl;
1094 	unsigned int pending_len;
1095 	u8 result[HASH_MAX_DIGESTSIZE + TESTMGR_POISON_LEN];
1096 	int err;
1097 
1098 	/* Set the key, if specified */
1099 	if (vec->ksize) {
1100 		err = crypto_ahash_setkey(tfm, vec->key, vec->ksize);
1101 		if (err) {
1102 			if (err == vec->setkey_error)
1103 				return 0;
1104 			pr_err("alg: hash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
1105 			       driver, vec_name, vec->setkey_error, err,
1106 			       crypto_ahash_get_flags(tfm));
1107 			return err;
1108 		}
1109 		if (vec->setkey_error) {
1110 			pr_err("alg: hash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
1111 			       driver, vec_name, vec->setkey_error);
1112 			return -EINVAL;
1113 		}
1114 	}
1115 
1116 	/* Build the scatterlist for the source data */
1117 	_input.iov_base = (void *)vec->plaintext;
1118 	_input.iov_len = vec->psize;
1119 	iov_iter_kvec(&input, WRITE, &_input, 1, vec->psize);
1120 	err = build_test_sglist(tsgl, cfg->src_divs, alignmask, vec->psize,
1121 				&input, divs);
1122 	if (err) {
1123 		pr_err("alg: hash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
1124 		       driver, vec_name, cfg->name);
1125 		return err;
1126 	}
1127 
1128 	/* Do the actual hashing */
1129 
1130 	testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
1131 	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);
1132 
1133 	if (cfg->finalization_type == FINALIZATION_TYPE_DIGEST ||
1134 	    vec->digest_error) {
1135 		/* Just using digest() */
1136 		ahash_request_set_callback(req, req_flags, crypto_req_done,
1137 					   &wait);
1138 		ahash_request_set_crypt(req, tsgl->sgl, result, vec->psize);
1139 		err = do_ahash_op(crypto_ahash_digest, req, &wait, cfg->nosimd);
1140 		if (err) {
1141 			if (err == vec->digest_error)
1142 				return 0;
1143 			pr_err("alg: hash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
1144 			       driver, vec_name, vec->digest_error, err,
1145 			       cfg->name);
1146 			return err;
1147 		}
1148 		if (vec->digest_error) {
1149 			pr_err("alg: hash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
1150 			       driver, vec_name, vec->digest_error, cfg->name);
1151 			return -EINVAL;
1152 		}
1153 		goto result_ready;
1154 	}
1155 
1156 	/* Using init(), zero or more update(), then final() or finup() */
1157 
1158 	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
1159 	ahash_request_set_crypt(req, NULL, result, 0);
1160 	err = do_ahash_op(crypto_ahash_init, req, &wait, cfg->nosimd);
1161 	err = check_nonfinal_hash_op("init", err, result, digestsize,
1162 				     driver, vec_name, cfg);
1163 	if (err)
1164 		return err;
1165 
1166 	pending_sgl = NULL;
1167 	pending_len = 0;
1168 	for (i = 0; i < tsgl->nents; i++) {
1169 		if (divs[i]->flush_type != FLUSH_TYPE_NONE &&
1170 		    pending_sgl != NULL) {
1171 			/* update() with the pending data */
1172 			ahash_request_set_callback(req, req_flags,
1173 						   crypto_req_done, &wait);
1174 			ahash_request_set_crypt(req, pending_sgl, result,
1175 						pending_len);
1176 			err = do_ahash_op(crypto_ahash_update, req, &wait,
1177 					  divs[i]->nosimd);
1178 			err = check_nonfinal_hash_op("update", err,
1179 						     result, digestsize,
1180 						     driver, vec_name, cfg);
1181 			if (err)
1182 				return err;
1183 			pending_sgl = NULL;
1184 			pending_len = 0;
1185 		}
1186 		if (divs[i]->flush_type == FLUSH_TYPE_REIMPORT) {
1187 			/* Test ->export() and ->import() */
1188 			testmgr_poison(hashstate + statesize,
1189 				       TESTMGR_POISON_LEN);
1190 			err = crypto_ahash_export(req, hashstate);
1191 			err = check_nonfinal_hash_op("export", err,
1192 						     result, digestsize,
1193 						     driver, vec_name, cfg);
1194 			if (err)
1195 				return err;
1196 			if (!testmgr_is_poison(hashstate + statesize,
1197 					       TESTMGR_POISON_LEN)) {
1198 				pr_err("alg: hash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
1199 				       driver, vec_name, cfg->name);
1200 				return -EOVERFLOW;
1201 			}
1202 
1203 			testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
1204 			err = crypto_ahash_import(req, hashstate);
1205 			err = check_nonfinal_hash_op("import", err,
1206 						     result, digestsize,
1207 						     driver, vec_name, cfg);
1208 			if (err)
1209 				return err;
1210 		}
1211 		if (pending_sgl == NULL)
1212 			pending_sgl = &tsgl->sgl[i];
1213 		pending_len += tsgl->sgl[i].length;
1214 	}
1215 
1216 	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
1217 	ahash_request_set_crypt(req, pending_sgl, result, pending_len);
1218 	if (cfg->finalization_type == FINALIZATION_TYPE_FINAL) {
1219 		/* finish with update() and final() */
1220 		err = do_ahash_op(crypto_ahash_update, req, &wait, cfg->nosimd);
1221 		err = check_nonfinal_hash_op("update", err, result, digestsize,
1222 					     driver, vec_name, cfg);
1223 		if (err)
1224 			return err;
1225 		err = do_ahash_op(crypto_ahash_final, req, &wait, cfg->nosimd);
1226 		if (err) {
1227 			pr_err("alg: hash: %s final() failed with err %d on test vector %s, cfg=\"%s\"\n",
1228 			       driver, err, vec_name, cfg->name);
1229 			return err;
1230 		}
1231 	} else {
1232 		/* finish with finup() */
1233 		err = do_ahash_op(crypto_ahash_finup, req, &wait, cfg->nosimd);
1234 		if (err) {
1235 			pr_err("alg: hash: %s finup() failed with err %d on test vector %s, cfg=\"%s\"\n",
1236 			       driver, err, vec_name, cfg->name);
1237 			return err;
1238 		}
1239 	}
1240 
1241 result_ready:
1242 	/* Check that the algorithm produced the correct digest */
1243 	if (memcmp(result, vec->digest, digestsize) != 0) {
1244 		pr_err("alg: hash: %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
1245 		       driver, vec_name, cfg->name);
1246 		return -EINVAL;
1247 	}
1248 	if (!testmgr_is_poison(&result[digestsize], TESTMGR_POISON_LEN)) {
1249 		pr_err("alg: hash: %s overran result buffer on test vector %s, cfg=\"%s\"\n",
1250 		       driver, vec_name, cfg->name);
1251 		return -EOVERFLOW;
1252 	}
1253 
1254 	return 0;
1255 }
1256 
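/*
 * Test one hash test vector with all default configs, plus random configs
 * when the extra self-tests are enabled.
 */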
1257 static int test_hash_vec(const char *driver, const struct hash_testvec *vec,
1258 			 unsigned int vec_num, struct ahash_request *req,
1259 			 struct test_sglist *tsgl, u8 *hashstate)
1260 {
1261 	char vec_name[16];
1262 	unsigned int i;
1263 	int err;
1264 
1265 	sprintf(vec_name, "%u", vec_num);
1266 
1267 	for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++) {
1268 		err = test_hash_vec_cfg(driver, vec, vec_name,
1269 					&default_hash_testvec_configs[i],
1270 					req, tsgl, hashstate);
1271 		if (err)
1272 			return err;
1273 	}
1274 
1275 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1276 	if (!noextratests) {
1277 		struct testvec_config cfg;
1278 		char cfgname[TESTVEC_CONFIG_NAMELEN];
1279 
1280 		for (i = 0; i < fuzz_iterations; i++) {
1281 			generate_random_testvec_config(&cfg, cfgname,
1282 						       sizeof(cfgname));
1283 			err = test_hash_vec_cfg(driver, vec, vec_name, &cfg,
1284 						req, tsgl, hashstate);
1285 			if (err)
1286 				return err;
1287 		}
1288 	}
1289 #endif
1290 	return 0;
1291 }
1292 
1293 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1294 /*
1295  * Generate a hash test vector from the given implementation.
1296  * Assumes the buffers in 'vec' were already allocated.
1297  */
1298 static void generate_random_hash_testvec(struct crypto_shash *tfm,
1299 					 struct hash_testvec *vec,
1300 					 unsigned int maxkeysize,
1301 					 unsigned int maxdatasize,
1302 					 char *name, size_t max_namelen)
1303 {
1304 	SHASH_DESC_ON_STACK(desc, tfm);
1305 
1306 	/* Data */
1307 	vec->psize = generate_random_length(maxdatasize);
1308 	generate_random_bytes((u8 *)vec->plaintext, vec->psize);
1309 
1310 	/*
1311 	 * Key: length in range [1, maxkeysize], but usually choose maxkeysize.
1312	 * If the algorithm is unkeyed, then maxkeysize == 0 and ksize stays 0.
1313 	 */
1314 	vec->setkey_error = 0;
1315 	vec->ksize = 0;
1316 	if (maxkeysize) {
1317 		vec->ksize = maxkeysize;
1318 		if (prandom_u32() % 4 == 0)
1319 			vec->ksize = 1 + (prandom_u32() % maxkeysize);
1320 		generate_random_bytes((u8 *)vec->key, vec->ksize);
1321 
1322 		vec->setkey_error = crypto_shash_setkey(tfm, vec->key,
1323 							vec->ksize);
1324 		/* If the key couldn't be set, no need to continue to digest. */
1325 		if (vec->setkey_error)
1326 			goto done;
1327 	}
1328 
1329 	/* Digest */
1330 	desc->tfm = tfm;
1331 	vec->digest_error = crypto_shash_digest(desc, vec->plaintext,
1332 						vec->psize, (u8 *)vec->digest);
1333 done:
1334 	snprintf(name, max_namelen, "\"random: psize=%u ksize=%u\"",
1335 		 vec->psize, vec->ksize);
1336 }
1337 
1338 /*
1339  * Test the hash algorithm represented by @req against the corresponding generic
1340  * implementation, if one is available.
1341  */
1342 static int test_hash_vs_generic_impl(const char *driver,
1343 				     const char *generic_driver,
1344 				     unsigned int maxkeysize,
1345 				     struct ahash_request *req,
1346 				     struct test_sglist *tsgl,
1347 				     u8 *hashstate)
1348 {
1349 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1350 	const unsigned int digestsize = crypto_ahash_digestsize(tfm);
1351 	const unsigned int blocksize = crypto_ahash_blocksize(tfm);
1352 	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
1353 	const char *algname = crypto_hash_alg_common(tfm)->base.cra_name;
1354 	char _generic_driver[CRYPTO_MAX_ALG_NAME];
1355 	struct crypto_shash *generic_tfm = NULL;
1356 	unsigned int i;
1357 	struct hash_testvec vec = { 0 };
1358 	char vec_name[64];
1359 	struct testvec_config cfg;
1360 	char cfgname[TESTVEC_CONFIG_NAMELEN];
1361 	int err;
1362 
1363 	if (noextratests)
1364 		return 0;
1365 
1366 	if (!generic_driver) { /* Use default naming convention? */
1367 		err = build_generic_driver_name(algname, _generic_driver);
1368 		if (err)
1369 			return err;
1370 		generic_driver = _generic_driver;
1371 	}
1372 
1373 	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
1374 		return 0;
1375 
1376 	generic_tfm = crypto_alloc_shash(generic_driver, 0, 0);
1377 	if (IS_ERR(generic_tfm)) {
1378 		err = PTR_ERR(generic_tfm);
1379 		if (err == -ENOENT) {
1380 			pr_warn("alg: hash: skipping comparison tests for %s because %s is unavailable\n",
1381 				driver, generic_driver);
1382 			return 0;
1383 		}
1384 		pr_err("alg: hash: error allocating %s (generic impl of %s): %d\n",
1385 		       generic_driver, algname, err);
1386 		return err;
1387 	}
1388 
1389 	/* Check the algorithm properties for consistency. */
1390 
1391 	if (digestsize != crypto_shash_digestsize(generic_tfm)) {
1392 		pr_err("alg: hash: digestsize for %s (%u) doesn't match generic impl (%u)\n",
1393 		       driver, digestsize,
1394 		       crypto_shash_digestsize(generic_tfm));
1395 		err = -EINVAL;
1396 		goto out;
1397 	}
1398 
1399 	if (blocksize != crypto_shash_blocksize(generic_tfm)) {
1400 		pr_err("alg: hash: blocksize for %s (%u) doesn't match generic impl (%u)\n",
1401 		       driver, blocksize, crypto_shash_blocksize(generic_tfm));
1402 		err = -EINVAL;
1403 		goto out;
1404 	}
1405 
1406 	/*
1407 	 * Now generate test vectors using the generic implementation, and test
1408 	 * the other implementation against them.
1409 	 */
1410 
1411 	vec.key = kmalloc(maxkeysize, GFP_KERNEL);
1412 	vec.plaintext = kmalloc(maxdatasize, GFP_KERNEL);
1413 	vec.digest = kmalloc(digestsize, GFP_KERNEL);
1414 	if (!vec.key || !vec.plaintext || !vec.digest) {
1415 		err = -ENOMEM;
1416 		goto out;
1417 	}
1418 
1419 	for (i = 0; i < fuzz_iterations * 8; i++) {
1420 		generate_random_hash_testvec(generic_tfm, &vec,
1421 					     maxkeysize, maxdatasize,
1422 					     vec_name, sizeof(vec_name));
1423 		generate_random_testvec_config(&cfg, cfgname, sizeof(cfgname));
1424 
1425 		err = test_hash_vec_cfg(driver, &vec, vec_name, &cfg,
1426 					req, tsgl, hashstate);
1427 		if (err)
1428 			goto out;
1429 		cond_resched();
1430 	}
1431 	err = 0;
1432 out:
1433 	kfree(vec.key);
1434 	kfree(vec.plaintext);
1435 	kfree(vec.digest);
1436 	crypto_free_shash(generic_tfm);
1437 	return err;
1438 }
1439 #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1440 static int test_hash_vs_generic_impl(const char *driver,
1441 				     const char *generic_driver,
1442 				     unsigned int maxkeysize,
1443 				     struct ahash_request *req,
1444 				     struct test_sglist *tsgl,
1445 				     u8 *hashstate)
1446 {
1447 	return 0;
1448 }
1449 #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1450 
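/*
 * Allocate the transform, request, scatterlist, and hash state buffers, run
 * every test vector in @vecs, then compare the implementation against its
 * generic counterpart when the extra self-tests are enabled.
 */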
1451 static int __alg_test_hash(const struct hash_testvec *vecs,
1452 			   unsigned int num_vecs, const char *driver,
1453 			   u32 type, u32 mask,
1454 			   const char *generic_driver, unsigned int maxkeysize)
1455 {
1456 	struct crypto_ahash *tfm;
1457 	struct ahash_request *req = NULL;
1458 	struct test_sglist *tsgl = NULL;
1459 	u8 *hashstate = NULL;
1460 	unsigned int i;
1461 	int err;
1462 
1463 	tfm = crypto_alloc_ahash(driver, type, mask);
1464 	if (IS_ERR(tfm)) {
1465 		pr_err("alg: hash: failed to allocate transform for %s: %ld\n",
1466 		       driver, PTR_ERR(tfm));
1467 		return PTR_ERR(tfm);
1468 	}
1469 
1470 	req = ahash_request_alloc(tfm, GFP_KERNEL);
1471 	if (!req) {
1472 		pr_err("alg: hash: failed to allocate request for %s\n",
1473 		       driver);
1474 		err = -ENOMEM;
1475 		goto out;
1476 	}
1477 
1478 	tsgl = kmalloc(sizeof(*tsgl), GFP_KERNEL);
1479 	if (!tsgl || init_test_sglist(tsgl) != 0) {
1480 		pr_err("alg: hash: failed to allocate test buffers for %s\n",
1481 		       driver);
1482 		kfree(tsgl);
1483 		tsgl = NULL;
1484 		err = -ENOMEM;
1485 		goto out;
1486 	}
1487 
1488 	hashstate = kmalloc(crypto_ahash_statesize(tfm) + TESTMGR_POISON_LEN,
1489 			    GFP_KERNEL);
1490 	if (!hashstate) {
1491 		pr_err("alg: hash: failed to allocate hash state buffer for %s\n",
1492 		       driver);
1493 		err = -ENOMEM;
1494 		goto out;
1495 	}
1496 
1497 	for (i = 0; i < num_vecs; i++) {
1498 		err = test_hash_vec(driver, &vecs[i], i, req, tsgl, hashstate);
1499 		if (err)
1500 			goto out;
1501 	}
1502 	err = test_hash_vs_generic_impl(driver, generic_driver, maxkeysize, req,
1503 					tsgl, hashstate);
1504 out:
1505 	kfree(hashstate);
1506 	if (tsgl) {
1507 		destroy_test_sglist(tsgl);
1508 		kfree(tsgl);
1509 	}
1510 	ahash_request_free(req);
1511 	crypto_free_ahash(tfm);
1512 	return err;
1513 }
1514 
1515 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1516 			 u32 type, u32 mask)
1517 {
1518 	const struct hash_testvec *template = desc->suite.hash.vecs;
1519 	unsigned int tcount = desc->suite.hash.count;
1520 	unsigned int nr_unkeyed, nr_keyed;
1521 	unsigned int maxkeysize = 0;
1522 	int err;
1523 
1524 	/*
1525 	 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
1526 	 * first, before setting a key on the tfm.  To make this easier, we
1527 	 * require that the unkeyed test vectors (if any) are listed first.
1528 	 */
1529 
1530 	for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
1531 		if (template[nr_unkeyed].ksize)
1532 			break;
1533 	}
1534 	for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
1535 		if (!template[nr_unkeyed + nr_keyed].ksize) {
1536 			pr_err("alg: hash: test vectors for %s out of order, "
1537 			       "unkeyed ones must come first\n", desc->alg);
1538 			return -EINVAL;
1539 		}
1540 		maxkeysize = max_t(unsigned int, maxkeysize,
1541 				   template[nr_unkeyed + nr_keyed].ksize);
1542 	}
1543 
1544 	err = 0;
1545 	if (nr_unkeyed) {
1546 		err = __alg_test_hash(template, nr_unkeyed, driver, type, mask,
1547 				      desc->generic_driver, maxkeysize);
1548 		template += nr_unkeyed;
1549 	}
1550 
1551 	if (!err && nr_keyed)
1552 		err = __alg_test_hash(template, nr_keyed, driver, type, mask,
1553 				      desc->generic_driver, maxkeysize);
1554 
1555 	return err;
1556 }
1557 
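/*
 * Test one AEAD test vector with one testvec_config: set the key and tag
 * size, run the encryption or decryption, check that the request and
 * scatterlists weren't corrupted, then verify the output or the expected
 * error (e.g. -EBADMSG for inauthentic "novrfy" inputs).
 */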
1558 static int test_aead_vec_cfg(const char *driver, int enc,
1559 			     const struct aead_testvec *vec,
1560 			     const char *vec_name,
1561 			     const struct testvec_config *cfg,
1562 			     struct aead_request *req,
1563 			     struct cipher_test_sglists *tsgls)
1564 {
1565 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1566 	const unsigned int alignmask = crypto_aead_alignmask(tfm);
1567 	const unsigned int ivsize = crypto_aead_ivsize(tfm);
1568 	const unsigned int authsize = vec->clen - vec->plen;
1569 	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
1570 	const char *op = enc ? "encryption" : "decryption";
1571 	DECLARE_CRYPTO_WAIT(wait);
1572 	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
1573 	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
1574 		 cfg->iv_offset +
1575 		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
1576 	struct kvec input[2];
1577 	int expected_error;
1578 	int err;
1579 
1580 	/* Set the key */
1581 	if (vec->wk)
1582 		crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
1583 	else
1584 		crypto_aead_clear_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
1585 	err = crypto_aead_setkey(tfm, vec->key, vec->klen);
1586 	if (err && err != vec->setkey_error) {
1587 		pr_err("alg: aead: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
1588 		       driver, vec_name, vec->setkey_error, err,
1589 		       crypto_aead_get_flags(tfm));
1590 		return err;
1591 	}
1592 	if (!err && vec->setkey_error) {
1593 		pr_err("alg: aead: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
1594 		       driver, vec_name, vec->setkey_error);
1595 		return -EINVAL;
1596 	}
1597 
1598 	/* Set the authentication tag size */
1599 	err = crypto_aead_setauthsize(tfm, authsize);
1600 	if (err && err != vec->setauthsize_error) {
1601 		pr_err("alg: aead: %s setauthsize failed on test vector %s; expected_error=%d, actual_error=%d\n",
1602 		       driver, vec_name, vec->setauthsize_error, err);
1603 		return err;
1604 	}
1605 	if (!err && vec->setauthsize_error) {
1606 		pr_err("alg: aead: %s setauthsize unexpectedly succeeded on test vector %s; expected_error=%d\n",
1607 		       driver, vec_name, vec->setauthsize_error);
1608 		return -EINVAL;
1609 	}
1610 
1611 	if (vec->setkey_error || vec->setauthsize_error)
1612 		return 0;
1613 
1614 	/* The IV must be copied to a buffer, as the algorithm may modify it */
1615 	if (WARN_ON(ivsize > MAX_IVLEN))
1616 		return -EINVAL;
1617 	if (vec->iv)
1618 		memcpy(iv, vec->iv, ivsize);
1619 	else
1620 		memset(iv, 0, ivsize);
1621 
1622 	/* Build the src/dst scatterlists */
1623 	input[0].iov_base = (void *)vec->assoc;
1624 	input[0].iov_len = vec->alen;
1625 	input[1].iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
1626 	input[1].iov_len = enc ? vec->plen : vec->clen;
1627 	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
1628 					vec->alen + (enc ? vec->plen :
1629 						     vec->clen),
1630 					vec->alen + (enc ? vec->clen :
1631 						     vec->plen),
1632 					input, 2);
1633 	if (err) {
1634 		pr_err("alg: aead: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
1635 		       driver, op, vec_name, cfg->name);
1636 		return err;
1637 	}
1638 
1639 	/* Do the actual encryption or decryption */
1640 	testmgr_poison(req->__ctx, crypto_aead_reqsize(tfm));
1641 	aead_request_set_callback(req, req_flags, crypto_req_done, &wait);
1642 	aead_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
1643 			       enc ? vec->plen : vec->clen, iv);
1644 	aead_request_set_ad(req, vec->alen);
1645 	if (cfg->nosimd)
1646 		crypto_disable_simd_for_test();
1647 	err = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
1648 	if (cfg->nosimd)
1649 		crypto_reenable_simd_for_test();
1650 	err = crypto_wait_req(err, &wait);
1651 
1652 	/* Check that the algorithm didn't overwrite things it shouldn't have */
1653 	if (req->cryptlen != (enc ? vec->plen : vec->clen) ||
1654 	    req->assoclen != vec->alen ||
1655 	    req->iv != iv ||
1656 	    req->src != tsgls->src.sgl_ptr ||
1657 	    req->dst != tsgls->dst.sgl_ptr ||
1658 	    crypto_aead_reqtfm(req) != tfm ||
1659 	    req->base.complete != crypto_req_done ||
1660 	    req->base.flags != req_flags ||
1661 	    req->base.data != &wait) {
1662 		pr_err("alg: aead: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
1663 		       driver, op, vec_name, cfg->name);
1664 		if (req->cryptlen != (enc ? vec->plen : vec->clen))
1665 			pr_err("alg: aead: changed 'req->cryptlen'\n");
1666 		if (req->assoclen != vec->alen)
1667 			pr_err("alg: aead: changed 'req->assoclen'\n");
1668 		if (req->iv != iv)
1669 			pr_err("alg: aead: changed 'req->iv'\n");
1670 		if (req->src != tsgls->src.sgl_ptr)
1671 			pr_err("alg: aead: changed 'req->src'\n");
1672 		if (req->dst != tsgls->dst.sgl_ptr)
1673 			pr_err("alg: aead: changed 'req->dst'\n");
1674 		if (crypto_aead_reqtfm(req) != tfm)
1675 			pr_err("alg: aead: changed 'req->base.tfm'\n");
1676 		if (req->base.complete != crypto_req_done)
1677 			pr_err("alg: aead: changed 'req->base.complete'\n");
1678 		if (req->base.flags != req_flags)
1679 			pr_err("alg: aead: changed 'req->base.flags'\n");
1680 		if (req->base.data != &wait)
1681 			pr_err("alg: aead: changed 'req->base.data'\n");
1682 		return -EINVAL;
1683 	}
1684 	if (is_test_sglist_corrupted(&tsgls->src)) {
1685 		pr_err("alg: aead: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
1686 		       driver, op, vec_name, cfg->name);
1687 		return -EINVAL;
1688 	}
1689 	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
1690 	    is_test_sglist_corrupted(&tsgls->dst)) {
1691 		pr_err("alg: aead: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
1692 		       driver, op, vec_name, cfg->name);
1693 		return -EINVAL;
1694 	}
1695 
1696 	/* Check for success or failure */
1697 	expected_error = vec->novrfy ? -EBADMSG : vec->crypt_error;
1698 	if (err) {
1699 		if (err == expected_error)
1700 			return 0;
1701 		pr_err("alg: aead: %s %s failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
1702 		       driver, op, vec_name, expected_error, err, cfg->name);
1703 		return err;
1704 	}
1705 	if (expected_error) {
1706 		pr_err("alg: aead: %s %s unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
1707 		       driver, op, vec_name, expected_error, cfg->name);
1708 		return -EINVAL;
1709 	}
1710 
1711 	/* Check for the correct output (ciphertext or plaintext) */
1712 	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
1713 				    enc ? vec->clen : vec->plen,
1714 				    vec->alen, enc || !cfg->inplace);
1715 	if (err == -EOVERFLOW) {
1716 		pr_err("alg: aead: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
1717 		       driver, op, vec_name, cfg->name);
1718 		return err;
1719 	}
1720 	if (err) {
1721 		pr_err("alg: aead: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
1722 		       driver, op, vec_name, cfg->name);
1723 		return err;
1724 	}
1725 
1726 	return 0;
1727 }
1728 
1729 static int test_aead_vec(const char *driver, int enc,
1730 			 const struct aead_testvec *vec, unsigned int vec_num,
1731 			 struct aead_request *req,
1732 			 struct cipher_test_sglists *tsgls)
1733 {
1734 	char vec_name[16];
1735 	unsigned int i;
1736 	int err;
1737 
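	/* Vectors with an intentionally corrupted auth tag are decryption-only. */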
1738 	if (enc && vec->novrfy)
1739 		return 0;
1740 
1741 	sprintf(vec_name, "%u", vec_num);
1742 
1743 	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
1744 		err = test_aead_vec_cfg(driver, enc, vec, vec_name,
1745 					&default_cipher_testvec_configs[i],
1746 					req, tsgls);
1747 		if (err)
1748 			return err;
1749 	}
1750 
1751 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1752 	if (!noextratests) {
1753 		struct testvec_config cfg;
1754 		char cfgname[TESTVEC_CONFIG_NAMELEN];
1755 
1756 		for (i = 0; i < fuzz_iterations; i++) {
1757 			generate_random_testvec_config(&cfg, cfgname,
1758 						       sizeof(cfgname));
1759 			err = test_aead_vec_cfg(driver, enc, vec, vec_name,
1760 						&cfg, req, tsgls);
1761 			if (err)
1762 				return err;
1763 		}
1764 	}
1765 #endif
1766 	return 0;
1767 }
1768 
1769 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1770 /*
1771  * Generate an AEAD test vector from the given implementation.
1772  * Assumes the buffers in 'vec' were already allocated.
1773  */
1774 static void generate_random_aead_testvec(struct aead_request *req,
1775 					 struct aead_testvec *vec,
1776 					 unsigned int maxkeysize,
1777 					 unsigned int maxdatasize,
1778 					 char *name, size_t max_namelen)
1779 {
1780 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1781 	const unsigned int ivsize = crypto_aead_ivsize(tfm);
1782 	unsigned int maxauthsize = crypto_aead_alg(tfm)->maxauthsize;
1783 	unsigned int authsize;
1784 	unsigned int total_len;
1785 	int i;
1786 	struct scatterlist src[2], dst;
1787 	u8 iv[MAX_IVLEN];
1788 	DECLARE_CRYPTO_WAIT(wait);
1789 
1790 	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
1791 	vec->klen = maxkeysize;
1792 	if (prandom_u32() % 4 == 0)
1793 		vec->klen = prandom_u32() % (maxkeysize + 1);
1794 	generate_random_bytes((u8 *)vec->key, vec->klen);
1795 	vec->setkey_error = crypto_aead_setkey(tfm, vec->key, vec->klen);
1796 
1797 	/* IV */
1798 	generate_random_bytes((u8 *)vec->iv, ivsize);
1799 
1800 	/* Tag length: in [0, maxauthsize], but usually choose maxauthsize */
1801 	authsize = maxauthsize;
1802 	if (prandom_u32() % 4 == 0)
1803 		authsize = prandom_u32() % (maxauthsize + 1);
1804 	if (WARN_ON(authsize > maxdatasize))
1805 		authsize = maxdatasize;
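	/* Reserve room for the tag so assoc + plaintext + tag still fits in the buffers. */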
1806 	maxdatasize -= authsize;
1807 	vec->setauthsize_error = crypto_aead_setauthsize(tfm, authsize);
1808 
1809 	/* Plaintext and associated data */
1810 	total_len = generate_random_length(maxdatasize);
1811 	if (prandom_u32() % 4 == 0)
1812 		vec->alen = 0;
1813 	else
1814 		vec->alen = generate_random_length(total_len);
1815 	vec->plen = total_len - vec->alen;
1816 	generate_random_bytes((u8 *)vec->assoc, vec->alen);
1817 	generate_random_bytes((u8 *)vec->ptext, vec->plen);
1818 
1819 	vec->clen = vec->plen + authsize;
1820 
1821 	/*
1822 	 * If the key or authentication tag size couldn't be set, no need to
1823 	 * continue to encrypt.
1824 	 */
1825 	if (vec->setkey_error || vec->setauthsize_error)
1826 		goto done;
1827 
1828 	/* Ciphertext */
1829 	sg_init_table(src, 2);
1830 	i = 0;
1831 	if (vec->alen)
1832 		sg_set_buf(&src[i++], vec->assoc, vec->alen);
1833 	if (vec->plen)
1834 		sg_set_buf(&src[i++], vec->ptext, vec->plen);
1835 	sg_init_one(&dst, vec->ctext, vec->alen + vec->clen);
1836 	memcpy(iv, vec->iv, ivsize);
1837 	aead_request_set_callback(req, 0, crypto_req_done, &wait);
1838 	aead_request_set_crypt(req, src, &dst, vec->plen, iv);
1839 	aead_request_set_ad(req, vec->alen);
1840 	vec->crypt_error = crypto_wait_req(crypto_aead_encrypt(req), &wait);
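	/*
	 * dst receives a copy of the associated data followed by the
	 * ciphertext; strip the assoc copy so that ctext holds just
	 * ciphertext || tag.
	 */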
1841 	if (vec->crypt_error == 0)
1842 		memmove((u8 *)vec->ctext, vec->ctext + vec->alen, vec->clen);
1843 done:
1844 	snprintf(name, max_namelen,
1845 		 "\"random: alen=%u plen=%u authsize=%u klen=%u\"",
1846 		 vec->alen, vec->plen, authsize, vec->klen);
1847 }
1848 
1849 /*
1850  * Test the AEAD algorithm represented by @req against the corresponding generic
1851  * implementation, if one is available.
1852  */
1853 static int test_aead_vs_generic_impl(const char *driver,
1854 				     const struct alg_test_desc *test_desc,
1855 				     struct aead_request *req,
1856 				     struct cipher_test_sglists *tsgls)
1857 {
1858 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1859 	const unsigned int ivsize = crypto_aead_ivsize(tfm);
1860 	const unsigned int maxauthsize = crypto_aead_alg(tfm)->maxauthsize;
1861 	const unsigned int blocksize = crypto_aead_blocksize(tfm);
1862 	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
1863 	const char *algname = crypto_aead_alg(tfm)->base.cra_name;
1864 	const char *generic_driver = test_desc->generic_driver;
1865 	char _generic_driver[CRYPTO_MAX_ALG_NAME];
1866 	struct crypto_aead *generic_tfm = NULL;
1867 	struct aead_request *generic_req = NULL;
1868 	unsigned int maxkeysize;
1869 	unsigned int i;
1870 	struct aead_testvec vec = { 0 };
1871 	char vec_name[64];
1872 	struct testvec_config cfg;
1873 	char cfgname[TESTVEC_CONFIG_NAMELEN];
1874 	int err;
1875 
1876 	if (noextratests)
1877 		return 0;
1878 
1879 	if (!generic_driver) { /* Use default naming convention? */
1880 		err = build_generic_driver_name(algname, _generic_driver);
1881 		if (err)
1882 			return err;
1883 		generic_driver = _generic_driver;
1884 	}
1885 
1886 	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
1887 		return 0;
1888 
1889 	generic_tfm = crypto_alloc_aead(generic_driver, 0, 0);
1890 	if (IS_ERR(generic_tfm)) {
1891 		err = PTR_ERR(generic_tfm);
1892 		if (err == -ENOENT) {
1893 			pr_warn("alg: aead: skipping comparison tests for %s because %s is unavailable\n",
1894 				driver, generic_driver);
1895 			return 0;
1896 		}
1897 		pr_err("alg: aead: error allocating %s (generic impl of %s): %d\n",
1898 		       generic_driver, algname, err);
1899 		return err;
1900 	}
1901 
1902 	generic_req = aead_request_alloc(generic_tfm, GFP_KERNEL);
1903 	if (!generic_req) {
1904 		err = -ENOMEM;
1905 		goto out;
1906 	}
1907 
1908 	/* Check the algorithm properties for consistency. */
1909 
1910 	if (maxauthsize != crypto_aead_alg(generic_tfm)->maxauthsize) {
1911 		pr_err("alg: aead: maxauthsize for %s (%u) doesn't match generic impl (%u)\n",
1912 		       driver, maxauthsize,
1913 		       crypto_aead_alg(generic_tfm)->maxauthsize);
1914 		err = -EINVAL;
1915 		goto out;
1916 	}
1917 
1918 	if (ivsize != crypto_aead_ivsize(generic_tfm)) {
1919 		pr_err("alg: aead: ivsize for %s (%u) doesn't match generic impl (%u)\n",
1920 		       driver, ivsize, crypto_aead_ivsize(generic_tfm));
1921 		err = -EINVAL;
1922 		goto out;
1923 	}
1924 
1925 	if (blocksize != crypto_aead_blocksize(generic_tfm)) {
1926 		pr_err("alg: aead: blocksize for %s (%u) doesn't match generic impl (%u)\n",
1927 		       driver, blocksize, crypto_aead_blocksize(generic_tfm));
1928 		err = -EINVAL;
1929 		goto out;
1930 	}
1931 
1932 	/*
1933 	 * Now generate test vectors using the generic implementation, and test
1934 	 * the other implementation against them.
1935 	 */
1936 
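	/* Size the key buffer for the largest key length used by this suite's vectors. */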
1937 	maxkeysize = 0;
1938 	for (i = 0; i < test_desc->suite.aead.count; i++)
1939 		maxkeysize = max_t(unsigned int, maxkeysize,
1940 				   test_desc->suite.aead.vecs[i].klen);
1941 
1942 	vec.key = kmalloc(maxkeysize, GFP_KERNEL);
1943 	vec.iv = kmalloc(ivsize, GFP_KERNEL);
1944 	vec.assoc = kmalloc(maxdatasize, GFP_KERNEL);
1945 	vec.ptext = kmalloc(maxdatasize, GFP_KERNEL);
1946 	vec.ctext = kmalloc(maxdatasize, GFP_KERNEL);
1947 	if (!vec.key || !vec.iv || !vec.assoc || !vec.ptext || !vec.ctext) {
1948 		err = -ENOMEM;
1949 		goto out;
1950 	}
1951 
1952 	for (i = 0; i < fuzz_iterations * 8; i++) {
1953 		generate_random_aead_testvec(generic_req, &vec,
1954 					     maxkeysize, maxdatasize,
1955 					     vec_name, sizeof(vec_name));
1956 		generate_random_testvec_config(&cfg, cfgname, sizeof(cfgname));
1957 
1958 		err = test_aead_vec_cfg(driver, ENCRYPT, &vec, vec_name, &cfg,
1959 					req, tsgls);
1960 		if (err)
1961 			goto out;
1962 		err = test_aead_vec_cfg(driver, DECRYPT, &vec, vec_name, &cfg,
1963 					req, tsgls);
1964 		if (err)
1965 			goto out;
1966 		cond_resched();
1967 	}
1968 	err = 0;
1969 out:
1970 	kfree(vec.key);
1971 	kfree(vec.iv);
1972 	kfree(vec.assoc);
1973 	kfree(vec.ptext);
1974 	kfree(vec.ctext);
1975 	crypto_free_aead(generic_tfm);
1976 	aead_request_free(generic_req);
1977 	return err;
1978 }
1979 #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1980 static int test_aead_vs_generic_impl(const char *driver,
1981 				     const struct alg_test_desc *test_desc,
1982 				     struct aead_request *req,
1983 				     struct cipher_test_sglists *tsgls)
1984 {
1985 	return 0;
1986 }
1987 #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1988 
1989 static int test_aead(const char *driver, int enc,
1990 		     const struct aead_test_suite *suite,
1991 		     struct aead_request *req,
1992 		     struct cipher_test_sglists *tsgls)
1993 {
1994 	unsigned int i;
1995 	int err;
1996 
1997 	for (i = 0; i < suite->count; i++) {
1998 		err = test_aead_vec(driver, enc, &suite->vecs[i], i, req,
1999 				    tsgls);
2000 		if (err)
2001 			return err;
2002 	}
2003 	return 0;
2004 }
2005 
2006 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
2007 			 u32 type, u32 mask)
2008 {
2009 	const struct aead_test_suite *suite = &desc->suite.aead;
2010 	struct crypto_aead *tfm;
2011 	struct aead_request *req = NULL;
2012 	struct cipher_test_sglists *tsgls = NULL;
2013 	int err;
2014 
2015 	if (suite->count <= 0) {
2016 		pr_err("alg: aead: empty test suite for %s\n", driver);
2017 		return -EINVAL;
2018 	}
2019 
2020 	tfm = crypto_alloc_aead(driver, type, mask);
2021 	if (IS_ERR(tfm)) {
2022 		pr_err("alg: aead: failed to allocate transform for %s: %ld\n",
2023 		       driver, PTR_ERR(tfm));
2024 		return PTR_ERR(tfm);
2025 	}
2026 
2027 	req = aead_request_alloc(tfm, GFP_KERNEL);
2028 	if (!req) {
2029 		pr_err("alg: aead: failed to allocate request for %s\n",
2030 		       driver);
2031 		err = -ENOMEM;
2032 		goto out;
2033 	}
2034 
2035 	tsgls = alloc_cipher_test_sglists();
2036 	if (!tsgls) {
2037 		pr_err("alg: aead: failed to allocate test buffers for %s\n",
2038 		       driver);
2039 		err = -ENOMEM;
2040 		goto out;
2041 	}
2042 
2043 	err = test_aead(driver, ENCRYPT, suite, req, tsgls);
2044 	if (err)
2045 		goto out;
2046 
2047 	err = test_aead(driver, DECRYPT, suite, req, tsgls);
2048 	if (err)
2049 		goto out;
2050 
2051 	err = test_aead_vs_generic_impl(driver, desc, req, tsgls);
2052 out:
2053 	free_cipher_test_sglists(tsgls);
2054 	aead_request_free(req);
2055 	crypto_free_aead(tfm);
2056 	return err;
2057 }
2058 
2059 static int test_cipher(struct crypto_cipher *tfm, int enc,
2060 		       const struct cipher_testvec *template,
2061 		       unsigned int tcount)
2062 {
2063 	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
2064 	unsigned int i, j, k;
2065 	char *q;
2066 	const char *e;
2067 	const char *input, *result;
2068 	void *data;
2069 	char *xbuf[XBUFSIZE];
2070 	int ret = -ENOMEM;
2071 
2072 	if (testmgr_alloc_buf(xbuf))
2073 		goto out_nobuf;
2074 
2075 	if (enc == ENCRYPT)
2076 		e = "encryption";
2077 	else
2078 		e = "decryption";
2079 
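	/*
	 * 'j' counts the vectors actually run (FIPS-skipped ones are excluded)
	 * and is used 1-based in the error messages below.
	 */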
2080 	j = 0;
2081 	for (i = 0; i < tcount; i++) {
2082 
2083 		if (fips_enabled && template[i].fips_skip)
2084 			continue;
2085 
2086 		input  = enc ? template[i].ptext : template[i].ctext;
2087 		result = enc ? template[i].ctext : template[i].ptext;
2088 		j++;
2089 
2090 		ret = -EINVAL;
2091 		if (WARN_ON(template[i].len > PAGE_SIZE))
2092 			goto out;
2093 
2094 		data = xbuf[0];
2095 		memcpy(data, input, template[i].len);
2096 
2097 		crypto_cipher_clear_flags(tfm, ~0);
2098 		if (template[i].wk)
2099 			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
2100 
2101 		ret = crypto_cipher_setkey(tfm, template[i].key,
2102 					   template[i].klen);
2103 		if (ret) {
2104 			if (ret == template[i].setkey_error)
2105 				continue;
2106 			pr_err("alg: cipher: %s setkey failed on test vector %u; expected_error=%d, actual_error=%d, flags=%#x\n",
2107 			       algo, j, template[i].setkey_error, ret,
2108 			       crypto_cipher_get_flags(tfm));
2109 			goto out;
2110 		}
2111 		if (template[i].setkey_error) {
2112 			pr_err("alg: cipher: %s setkey unexpectedly succeeded on test vector %u; expected_error=%d\n",
2113 			       algo, j, template[i].setkey_error);
2114 			ret = -EINVAL;
2115 			goto out;
2116 		}
2117 
2118 		for (k = 0; k < template[i].len;
2119 		     k += crypto_cipher_blocksize(tfm)) {
2120 			if (enc)
2121 				crypto_cipher_encrypt_one(tfm, data + k,
2122 							  data + k);
2123 			else
2124 				crypto_cipher_decrypt_one(tfm, data + k,
2125 							  data + k);
2126 		}
2127 
2128 		q = data;
2129 		if (memcmp(q, result, template[i].len)) {
2130 			printk(KERN_ERR "alg: cipher: Test %d failed "
2131 			       "on %s for %s\n", j, e, algo);
2132 			hexdump(q, template[i].len);
2133 			ret = -EINVAL;
2134 			goto out;
2135 		}
2136 	}
2137 
2138 	ret = 0;
2139 
2140 out:
2141 	testmgr_free_buf(xbuf);
2142 out_nobuf:
2143 	return ret;
2144 }
2145 
2146 static int test_skcipher_vec_cfg(const char *driver, int enc,
2147 				 const struct cipher_testvec *vec,
2148 				 const char *vec_name,
2149 				 const struct testvec_config *cfg,
2150 				 struct skcipher_request *req,
2151 				 struct cipher_test_sglists *tsgls)
2152 {
2153 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
2154 	const unsigned int alignmask = crypto_skcipher_alignmask(tfm);
2155 	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
2156 	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
2157 	const char *op = enc ? "encryption" : "decryption";
2158 	DECLARE_CRYPTO_WAIT(wait);
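	/* Place the IV at the offset/misalignment requested by the test config. */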
2159 	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
2160 	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
2161 		 cfg->iv_offset +
2162 		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
2163 	struct kvec input;
2164 	int err;
2165 
2166 	/* Set the key */
2167 	if (vec->wk)
2168 		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
2169 	else
2170 		crypto_skcipher_clear_flags(tfm,
2171 					    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
2172 	err = crypto_skcipher_setkey(tfm, vec->key, vec->klen);
2173 	if (err) {
2174 		if (err == vec->setkey_error)
2175 			return 0;
2176 		pr_err("alg: skcipher: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
2177 		       driver, vec_name, vec->setkey_error, err,
2178 		       crypto_skcipher_get_flags(tfm));
2179 		return err;
2180 	}
2181 	if (vec->setkey_error) {
2182 		pr_err("alg: skcipher: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
2183 		       driver, vec_name, vec->setkey_error);
2184 		return -EINVAL;
2185 	}
2186 
2187 	/* The IV must be copied to a buffer, as the algorithm may modify it */
2188 	if (ivsize) {
2189 		if (WARN_ON(ivsize > MAX_IVLEN))
2190 			return -EINVAL;
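		/*
		 * Algorithms that generate their own IV are decrypted with
		 * the IV they produced (iv_out).
		 */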
2191 		if (vec->generates_iv && !enc)
2192 			memcpy(iv, vec->iv_out, ivsize);
2193 		else if (vec->iv)
2194 			memcpy(iv, vec->iv, ivsize);
2195 		else
2196 			memset(iv, 0, ivsize);
2197 	} else {
2198 		if (vec->generates_iv) {
2199 			pr_err("alg: skcipher: %s has ivsize=0 but test vector %s generates IV!\n",
2200 			       driver, vec_name);
2201 			return -EINVAL;
2202 		}
2203 		iv = NULL;
2204 	}
2205 
2206 	/* Build the src/dst scatterlists */
2207 	input.iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
2208 	input.iov_len = vec->len;
2209 	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
2210 					vec->len, vec->len, &input, 1);
2211 	if (err) {
2212 		pr_err("alg: skcipher: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
2213 		       driver, op, vec_name, cfg->name);
2214 		return err;
2215 	}
2216 
2217 	/* Do the actual encryption or decryption */
2218 	testmgr_poison(req->__ctx, crypto_skcipher_reqsize(tfm));
2219 	skcipher_request_set_callback(req, req_flags, crypto_req_done, &wait);
2220 	skcipher_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
2221 				   vec->len, iv);
2222 	if (cfg->nosimd)
2223 		crypto_disable_simd_for_test();
2224 	err = enc ? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
2225 	if (cfg->nosimd)
2226 		crypto_reenable_simd_for_test();
2227 	err = crypto_wait_req(err, &wait);
2228 
2229 	/* Check that the algorithm didn't overwrite things it shouldn't have */
2230 	if (req->cryptlen != vec->len ||
2231 	    req->iv != iv ||
2232 	    req->src != tsgls->src.sgl_ptr ||
2233 	    req->dst != tsgls->dst.sgl_ptr ||
2234 	    crypto_skcipher_reqtfm(req) != tfm ||
2235 	    req->base.complete != crypto_req_done ||
2236 	    req->base.flags != req_flags ||
2237 	    req->base.data != &wait) {
2238 		pr_err("alg: skcipher: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
2239 		       driver, op, vec_name, cfg->name);
2240 		if (req->cryptlen != vec->len)
2241 			pr_err("alg: skcipher: changed 'req->cryptlen'\n");
2242 		if (req->iv != iv)
2243 			pr_err("alg: skcipher: changed 'req->iv'\n");
2244 		if (req->src != tsgls->src.sgl_ptr)
2245 			pr_err("alg: skcipher: changed 'req->src'\n");
2246 		if (req->dst != tsgls->dst.sgl_ptr)
2247 			pr_err("alg: skcipher: changed 'req->dst'\n");
2248 		if (crypto_skcipher_reqtfm(req) != tfm)
2249 			pr_err("alg: skcipher: changed 'req->base.tfm'\n");
2250 		if (req->base.complete != crypto_req_done)
2251 			pr_err("alg: skcipher: changed 'req->base.complete'\n");
2252 		if (req->base.flags != req_flags)
2253 			pr_err("alg: skcipher: changed 'req->base.flags'\n");
2254 		if (req->base.data != &wait)
2255 			pr_err("alg: skcipher: changed 'req->base.data'\n");
2256 		return -EINVAL;
2257 	}
2258 	if (is_test_sglist_corrupted(&tsgls->src)) {
2259 		pr_err("alg: skcipher: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
2260 		       driver, op, vec_name, cfg->name);
2261 		return -EINVAL;
2262 	}
2263 	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
2264 	    is_test_sglist_corrupted(&tsgls->dst)) {
2265 		pr_err("alg: skcipher: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
2266 		       driver, op, vec_name, cfg->name);
2267 		return -EINVAL;
2268 	}
2269 
2270 	/* Check for success or failure */
2271 	if (err) {
2272 		if (err == vec->crypt_error)
2273 			return 0;
2274 		pr_err("alg: skcipher: %s %s failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
2275 		       driver, op, vec_name, vec->crypt_error, err, cfg->name);
2276 		return err;
2277 	}
2278 	if (vec->crypt_error) {
2279 		pr_err("alg: skcipher: %s %s unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
2280 		       driver, op, vec_name, vec->crypt_error, cfg->name);
2281 		return -EINVAL;
2282 	}
2283 
2284 	/* Check for the correct output (ciphertext or plaintext) */
2285 	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
2286 				    vec->len, 0, true);
2287 	if (err == -EOVERFLOW) {
2288 		pr_err("alg: skcipher: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
2289 		       driver, op, vec_name, cfg->name);
2290 		return err;
2291 	}
2292 	if (err) {
2293 		pr_err("alg: skcipher: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
2294 		       driver, op, vec_name, cfg->name);
2295 		return err;
2296 	}
2297 
2298 	/* If applicable, check that the algorithm generated the correct IV */
2299 	if (vec->iv_out && memcmp(iv, vec->iv_out, ivsize) != 0) {
2300 		pr_err("alg: skcipher: %s %s test failed (wrong output IV) on test vector %s, cfg=\"%s\"\n",
2301 		       driver, op, vec_name, cfg->name);
2302 		hexdump(iv, ivsize);
2303 		return -EINVAL;
2304 	}
2305 
2306 	return 0;
2307 }
2308 
2309 static int test_skcipher_vec(const char *driver, int enc,
2310 			     const struct cipher_testvec *vec,
2311 			     unsigned int vec_num,
2312 			     struct skcipher_request *req,
2313 			     struct cipher_test_sglists *tsgls)
2314 {
2315 	char vec_name[16];
2316 	unsigned int i;
2317 	int err;
2318 
2319 	if (fips_enabled && vec->fips_skip)
2320 		return 0;
2321 
2322 	sprintf(vec_name, "%u", vec_num);
2323 
2324 	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
2325 		err = test_skcipher_vec_cfg(driver, enc, vec, vec_name,
2326 					    &default_cipher_testvec_configs[i],
2327 					    req, tsgls);
2328 		if (err)
2329 			return err;
2330 	}
2331 
2332 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2333 	if (!noextratests) {
2334 		struct testvec_config cfg;
2335 		char cfgname[TESTVEC_CONFIG_NAMELEN];
2336 
2337 		for (i = 0; i < fuzz_iterations; i++) {
2338 			generate_random_testvec_config(&cfg, cfgname,
2339 						       sizeof(cfgname));
2340 			err = test_skcipher_vec_cfg(driver, enc, vec, vec_name,
2341 						    &cfg, req, tsgls);
2342 			if (err)
2343 				return err;
2344 		}
2345 	}
2346 #endif
2347 	return 0;
2348 }
2349 
2350 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2351 /*
2352  * Generate a symmetric cipher test vector from the given implementation.
2353  * Assumes the buffers in 'vec' were already allocated.
2354  */
2355 static void generate_random_cipher_testvec(struct skcipher_request *req,
2356 					   struct cipher_testvec *vec,
2357 					   unsigned int maxdatasize,
2358 					   char *name, size_t max_namelen)
2359 {
2360 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
2361 	const unsigned int maxkeysize = tfm->keysize;
2362 	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
2363 	struct scatterlist src, dst;
2364 	u8 iv[MAX_IVLEN];
2365 	DECLARE_CRYPTO_WAIT(wait);
2366 
2367 	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
2368 	vec->klen = maxkeysize;
2369 	if (prandom_u32() % 4 == 0)
2370 		vec->klen = prandom_u32() % (maxkeysize + 1);
2371 	generate_random_bytes((u8 *)vec->key, vec->klen);
2372 	vec->setkey_error = crypto_skcipher_setkey(tfm, vec->key, vec->klen);
2373 
2374 	/* IV */
2375 	generate_random_bytes((u8 *)vec->iv, ivsize);
2376 
2377 	/* Plaintext */
2378 	vec->len = generate_random_length(maxdatasize);
2379 	generate_random_bytes((u8 *)vec->ptext, vec->len);
2380 
2381 	/* If the key couldn't be set, no need to continue to encrypt. */
2382 	if (vec->setkey_error)
2383 		goto done;
2384 
2385 	/* Ciphertext */
2386 	sg_init_one(&src, vec->ptext, vec->len);
2387 	sg_init_one(&dst, vec->ctext, vec->len);
2388 	memcpy(iv, vec->iv, ivsize);
2389 	skcipher_request_set_callback(req, 0, crypto_req_done, &wait);
2390 	skcipher_request_set_crypt(req, &src, &dst, vec->len, iv);
2391 	vec->crypt_error = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
2392 done:
2393 	snprintf(name, max_namelen, "\"random: len=%u klen=%u\"",
2394 		 vec->len, vec->klen);
2395 }
2396 
2397 /*
2398  * Test the skcipher algorithm represented by @req against the corresponding
2399  * generic implementation, if one is available.
2400  */
2401 static int test_skcipher_vs_generic_impl(const char *driver,
2402 					 const char *generic_driver,
2403 					 struct skcipher_request *req,
2404 					 struct cipher_test_sglists *tsgls)
2405 {
2406 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
2407 	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
2408 	const unsigned int blocksize = crypto_skcipher_blocksize(tfm);
2409 	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
2410 	const char *algname = crypto_skcipher_alg(tfm)->base.cra_name;
2411 	char _generic_driver[CRYPTO_MAX_ALG_NAME];
2412 	struct crypto_skcipher *generic_tfm = NULL;
2413 	struct skcipher_request *generic_req = NULL;
2414 	unsigned int i;
2415 	struct cipher_testvec vec = { 0 };
2416 	char vec_name[64];
2417 	struct testvec_config cfg;
2418 	char cfgname[TESTVEC_CONFIG_NAMELEN];
2419 	int err;
2420 
2421 	if (noextratests)
2422 		return 0;
2423 
2424 	/* Keywrap isn't supported here yet as it handles its IV differently. */
2425 	if (strncmp(algname, "kw(", 3) == 0)
2426 		return 0;
2427 
2428 	if (!generic_driver) { /* Use default naming convention? */
2429 		err = build_generic_driver_name(algname, _generic_driver);
2430 		if (err)
2431 			return err;
2432 		generic_driver = _generic_driver;
2433 	}
2434 
2435 	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
2436 		return 0;
2437 
2438 	generic_tfm = crypto_alloc_skcipher(generic_driver, 0, 0);
2439 	if (IS_ERR(generic_tfm)) {
2440 		err = PTR_ERR(generic_tfm);
2441 		if (err == -ENOENT) {
2442 			pr_warn("alg: skcipher: skipping comparison tests for %s because %s is unavailable\n",
2443 				driver, generic_driver);
2444 			return 0;
2445 		}
2446 		pr_err("alg: skcipher: error allocating %s (generic impl of %s): %d\n",
2447 		       generic_driver, algname, err);
2448 		return err;
2449 	}
2450 
2451 	generic_req = skcipher_request_alloc(generic_tfm, GFP_KERNEL);
2452 	if (!generic_req) {
2453 		err = -ENOMEM;
2454 		goto out;
2455 	}
2456 
2457 	/* Check the algorithm properties for consistency. */
2458 
2459 	if (tfm->keysize != generic_tfm->keysize) {
2460 		pr_err("alg: skcipher: max keysize for %s (%u) doesn't match generic impl (%u)\n",
2461 		       driver, tfm->keysize, generic_tfm->keysize);
2462 		err = -EINVAL;
2463 		goto out;
2464 	}
2465 
2466 	if (ivsize != crypto_skcipher_ivsize(generic_tfm)) {
2467 		pr_err("alg: skcipher: ivsize for %s (%u) doesn't match generic impl (%u)\n",
2468 		       driver, ivsize, crypto_skcipher_ivsize(generic_tfm));
2469 		err = -EINVAL;
2470 		goto out;
2471 	}
2472 
2473 	if (blocksize != crypto_skcipher_blocksize(generic_tfm)) {
2474 		pr_err("alg: skcipher: blocksize for %s (%u) doesn't match generic impl (%u)\n",
2475 		       driver, blocksize,
2476 		       crypto_skcipher_blocksize(generic_tfm));
2477 		err = -EINVAL;
2478 		goto out;
2479 	}
2480 
2481 	/*
2482 	 * Now generate test vectors using the generic implementation, and test
2483 	 * the other implementation against them.
2484 	 */
2485 
2486 	vec.key = kmalloc(tfm->keysize, GFP_KERNEL);
2487 	vec.iv = kmalloc(ivsize, GFP_KERNEL);
2488 	vec.ptext = kmalloc(maxdatasize, GFP_KERNEL);
2489 	vec.ctext = kmalloc(maxdatasize, GFP_KERNEL);
2490 	if (!vec.key || !vec.iv || !vec.ptext || !vec.ctext) {
2491 		err = -ENOMEM;
2492 		goto out;
2493 	}
2494 
2495 	for (i = 0; i < fuzz_iterations * 8; i++) {
2496 		generate_random_cipher_testvec(generic_req, &vec, maxdatasize,
2497 					       vec_name, sizeof(vec_name));
2498 		generate_random_testvec_config(&cfg, cfgname, sizeof(cfgname));
2499 
2500 		err = test_skcipher_vec_cfg(driver, ENCRYPT, &vec, vec_name,
2501 					    &cfg, req, tsgls);
2502 		if (err)
2503 			goto out;
2504 		err = test_skcipher_vec_cfg(driver, DECRYPT, &vec, vec_name,
2505 					    &cfg, req, tsgls);
2506 		if (err)
2507 			goto out;
2508 		cond_resched();
2509 	}
2510 	err = 0;
2511 out:
2512 	kfree(vec.key);
2513 	kfree(vec.iv);
2514 	kfree(vec.ptext);
2515 	kfree(vec.ctext);
2516 	crypto_free_skcipher(generic_tfm);
2517 	skcipher_request_free(generic_req);
2518 	return err;
2519 }
2520 #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
2521 static int test_skcipher_vs_generic_impl(const char *driver,
2522 					 const char *generic_driver,
2523 					 struct skcipher_request *req,
2524 					 struct cipher_test_sglists *tsgls)
2525 {
2526 	return 0;
2527 }
2528 #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
2529 
2530 static int test_skcipher(const char *driver, int enc,
2531 			 const struct cipher_test_suite *suite,
2532 			 struct skcipher_request *req,
2533 			 struct cipher_test_sglists *tsgls)
2534 {
2535 	unsigned int i;
2536 	int err;
2537 
2538 	for (i = 0; i < suite->count; i++) {
2539 		err = test_skcipher_vec(driver, enc, &suite->vecs[i], i, req,
2540 					tsgls);
2541 		if (err)
2542 			return err;
2543 	}
2544 	return 0;
2545 }
2546 
2547 static int alg_test_skcipher(const struct alg_test_desc *desc,
2548 			     const char *driver, u32 type, u32 mask)
2549 {
2550 	const struct cipher_test_suite *suite = &desc->suite.cipher;
2551 	struct crypto_skcipher *tfm;
2552 	struct skcipher_request *req = NULL;
2553 	struct cipher_test_sglists *tsgls = NULL;
2554 	int err;
2555 
2556 	if (suite->count <= 0) {
2557 		pr_err("alg: skcipher: empty test suite for %s\n", driver);
2558 		return -EINVAL;
2559 	}
2560 
2561 	tfm = crypto_alloc_skcipher(driver, type, mask);
2562 	if (IS_ERR(tfm)) {
2563 		pr_err("alg: skcipher: failed to allocate transform for %s: %ld\n",
2564 		       driver, PTR_ERR(tfm));
2565 		return PTR_ERR(tfm);
2566 	}
2567 
2568 	req = skcipher_request_alloc(tfm, GFP_KERNEL);
2569 	if (!req) {
2570 		pr_err("alg: skcipher: failed to allocate request for %s\n",
2571 		       driver);
2572 		err = -ENOMEM;
2573 		goto out;
2574 	}
2575 
2576 	tsgls = alloc_cipher_test_sglists();
2577 	if (!tsgls) {
2578 		pr_err("alg: skcipher: failed to allocate test buffers for %s\n",
2579 		       driver);
2580 		err = -ENOMEM;
2581 		goto out;
2582 	}
2583 
2584 	err = test_skcipher(driver, ENCRYPT, suite, req, tsgls);
2585 	if (err)
2586 		goto out;
2587 
2588 	err = test_skcipher(driver, DECRYPT, suite, req, tsgls);
2589 	if (err)
2590 		goto out;
2591 
2592 	err = test_skcipher_vs_generic_impl(driver, desc->generic_driver, req,
2593 					    tsgls);
2594 out:
2595 	free_cipher_test_sglists(tsgls);
2596 	skcipher_request_free(req);
2597 	crypto_free_skcipher(tfm);
2598 	return err;
2599 }
2600 
2601 static int test_comp(struct crypto_comp *tfm,
2602 		     const struct comp_testvec *ctemplate,
2603 		     const struct comp_testvec *dtemplate,
2604 		     int ctcount, int dtcount)
2605 {
2606 	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
2607 	char *output, *decomp_output;
2608 	unsigned int i;
2609 	int ret;
2610 
2611 	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
2612 	if (!output)
2613 		return -ENOMEM;
2614 
2615 	decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
2616 	if (!decomp_output) {
2617 		kfree(output);
2618 		return -ENOMEM;
2619 	}
2620 
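	/*
	 * The compression vectors are checked by round-trip: compress the
	 * input, decompress the result, and compare with the original input.
	 */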
2621 	for (i = 0; i < ctcount; i++) {
2622 		int ilen;
2623 		unsigned int dlen = COMP_BUF_SIZE;
2624 
2625 		memset(output, 0, COMP_BUF_SIZE);
2626 		memset(decomp_output, 0, COMP_BUF_SIZE);
2627 
2628 		ilen = ctemplate[i].inlen;
2629 		ret = crypto_comp_compress(tfm, ctemplate[i].input,
2630 					   ilen, output, &dlen);
2631 		if (ret) {
2632 			printk(KERN_ERR "alg: comp: compression failed "
2633 			       "on test %d for %s: ret=%d\n", i + 1, algo,
2634 			       -ret);
2635 			goto out;
2636 		}
2637 
2638 		ilen = dlen;
2639 		dlen = COMP_BUF_SIZE;
2640 		ret = crypto_comp_decompress(tfm, output,
2641 					     ilen, decomp_output, &dlen);
2642 		if (ret) {
2643 			pr_err("alg: comp: decompression failed on compression test %d for %s: ret=%d\n",
2644 			       i + 1, algo, -ret);
2645 			goto out;
2646 		}
2647 
2648 		if (dlen != ctemplate[i].inlen) {
2649 			printk(KERN_ERR "alg: comp: Compression test %d "
2650 			       "failed for %s: output len = %d\n", i + 1, algo,
2651 			       dlen);
2652 			ret = -EINVAL;
2653 			goto out;
2654 		}
2655 
2656 		if (memcmp(decomp_output, ctemplate[i].input,
2657 			   ctemplate[i].inlen)) {
2658 			pr_err("alg: comp: compression test %d failed for %s: round-trip output differs\n",
2659 			       i + 1, algo);
2660 			hexdump(decomp_output, dlen);
2661 			ret = -EINVAL;
2662 			goto out;
2663 		}
2664 	}
2665 
2666 	for (i = 0; i < dtcount; i++) {
2667 		int ilen;
2668 		unsigned int dlen = COMP_BUF_SIZE;
2669 
2670 		memset(decomp_output, 0, COMP_BUF_SIZE);
2671 
2672 		ilen = dtemplate[i].inlen;
2673 		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
2674 					     ilen, decomp_output, &dlen);
2675 		if (ret) {
2676 			printk(KERN_ERR "alg: comp: decompression failed "
2677 			       "on test %d for %s: ret=%d\n", i + 1, algo,
2678 			       -ret);
2679 			goto out;
2680 		}
2681 
2682 		if (dlen != dtemplate[i].outlen) {
2683 			printk(KERN_ERR "alg: comp: Decompression test %d "
2684 			       "failed for %s: output len = %d\n", i + 1, algo,
2685 			       dlen);
2686 			ret = -EINVAL;
2687 			goto out;
2688 		}
2689 
2690 		if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
2691 			printk(KERN_ERR "alg: comp: Decompression test %d "
2692 			       "failed for %s\n", i + 1, algo);
2693 			hexdump(decomp_output, dlen);
2694 			ret = -EINVAL;
2695 			goto out;
2696 		}
2697 	}
2698 
2699 	ret = 0;
2700 
2701 out:
2702 	kfree(decomp_output);
2703 	kfree(output);
2704 	return ret;
2705 }
2706 
2707 static int test_acomp(struct crypto_acomp *tfm,
2708 		      const struct comp_testvec *ctemplate,
2709 		      const struct comp_testvec *dtemplate,
2710 		      int ctcount, int dtcount)
2711 {
2712 	const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
2713 	unsigned int i;
2714 	char *output, *decomp_out;
2715 	int ret;
2716 	struct scatterlist src, dst;
2717 	struct acomp_req *req;
2718 	struct crypto_wait wait;
2719 
2720 	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
2721 	if (!output)
2722 		return -ENOMEM;
2723 
2724 	decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
2725 	if (!decomp_out) {
2726 		kfree(output);
2727 		return -ENOMEM;
2728 	}
2729 
2730 	for (i = 0; i < ctcount; i++) {
2731 		unsigned int dlen = COMP_BUF_SIZE;
2732 		int ilen = ctemplate[i].inlen;
2733 		void *input_vec;
2734 
2735 		input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
2736 		if (!input_vec) {
2737 			ret = -ENOMEM;
2738 			goto out;
2739 		}
2740 
2741 		memset(output, 0, dlen);
2742 		crypto_init_wait(&wait);
2743 		sg_init_one(&src, input_vec, ilen);
2744 		sg_init_one(&dst, output, dlen);
2745 
2746 		req = acomp_request_alloc(tfm);
2747 		if (!req) {
2748 			pr_err("alg: acomp: request alloc failed for %s\n",
2749 			       algo);
2750 			kfree(input_vec);
2751 			ret = -ENOMEM;
2752 			goto out;
2753 		}
2754 
2755 		acomp_request_set_params(req, &src, &dst, ilen, dlen);
2756 		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2757 					   crypto_req_done, &wait);
2758 
2759 		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
2760 		if (ret) {
2761 			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
2762 			       i + 1, algo, -ret);
2763 			kfree(input_vec);
2764 			acomp_request_free(req);
2765 			goto out;
2766 		}
2767 
2768 		ilen = req->dlen;
2769 		dlen = COMP_BUF_SIZE;
2770 		sg_init_one(&src, output, ilen);
2771 		sg_init_one(&dst, decomp_out, dlen);
2772 		crypto_init_wait(&wait);
2773 		acomp_request_set_params(req, &src, &dst, ilen, dlen);
2774 
2775 		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
2776 		if (ret) {
2777 			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
2778 			pr_err("alg: acomp: decompression failed on compression test %d for %s: ret=%d\n",
2779 			kfree(input_vec);
2780 			acomp_request_free(req);
2781 			goto out;
2782 		}
2783 
2784 		if (req->dlen != ctemplate[i].inlen) {
2785 			pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
2786 			       i + 1, algo, req->dlen);
2787 			ret = -EINVAL;
2788 			kfree(input_vec);
2789 			acomp_request_free(req);
2790 			goto out;
2791 		}
2792 
2793 		if (memcmp(input_vec, decomp_out, req->dlen)) {
2794 			pr_err("alg: acomp: Compression test %d failed for %s\n",
2795 			       i + 1, algo);
2796 			hexdump(output, req->dlen);
2797 			ret = -EINVAL;
2798 			kfree(input_vec);
2799 			acomp_request_free(req);
2800 			goto out;
2801 		}
2802 
2803 		kfree(input_vec);
2804 		acomp_request_free(req);
2805 	}
2806 
2807 	for (i = 0; i < dtcount; i++) {
2808 		unsigned int dlen = COMP_BUF_SIZE;
2809 		int ilen = dtemplate[i].inlen;
2810 		void *input_vec;
2811 
2812 		input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
2813 		if (!input_vec) {
2814 			ret = -ENOMEM;
2815 			goto out;
2816 		}
2817 
2818 		memset(output, 0, dlen);
2819 		crypto_init_wait(&wait);
2820 		sg_init_one(&src, input_vec, ilen);
2821 		sg_init_one(&dst, output, dlen);
2822 
2823 		req = acomp_request_alloc(tfm);
2824 		if (!req) {
2825 			pr_err("alg: acomp: request alloc failed for %s\n",
2826 			       algo);
2827 			kfree(input_vec);
2828 			ret = -ENOMEM;
2829 			goto out;
2830 		}
2831 
2832 		acomp_request_set_params(req, &src, &dst, ilen, dlen);
2833 		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2834 					   crypto_req_done, &wait);
2835 
2836 		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
2837 		if (ret) {
2838 			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
2839 			       i + 1, algo, -ret);
2840 			kfree(input_vec);
2841 			acomp_request_free(req);
2842 			goto out;
2843 		}
2844 
2845 		if (req->dlen != dtemplate[i].outlen) {
2846 			pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
2847 			       i + 1, algo, req->dlen);
2848 			ret = -EINVAL;
2849 			kfree(input_vec);
2850 			acomp_request_free(req);
2851 			goto out;
2852 		}
2853 
2854 		if (memcmp(output, dtemplate[i].output, req->dlen)) {
2855 			pr_err("alg: acomp: Decompression test %d failed for %s\n",
2856 			       i + 1, algo);
2857 			hexdump(output, req->dlen);
2858 			ret = -EINVAL;
2859 			kfree(input_vec);
2860 			acomp_request_free(req);
2861 			goto out;
2862 		}
2863 
2864 		kfree(input_vec);
2865 		acomp_request_free(req);
2866 	}
2867 
2868 	ret = 0;
2869 
2870 out:
2871 	kfree(decomp_out);
2872 	kfree(output);
2873 	return ret;
2874 }
2875 
2876 static int test_cprng(struct crypto_rng *tfm,
2877 		      const struct cprng_testvec *template,
2878 		      unsigned int tcount)
2879 {
2880 	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
2881 	int err = 0, i, j, seedsize;
2882 	u8 *seed;
2883 	char result[32];
2884 
2885 	seedsize = crypto_rng_seedsize(tfm);
2886 
2887 	seed = kmalloc(seedsize, GFP_KERNEL);
2888 	if (!seed) {
2889 		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
2890 		       "for %s\n", algo);
2891 		return -ENOMEM;
2892 	}
2893 
2894 	for (i = 0; i < tcount; i++) {
2895 		memset(result, 0, 32);
2896 
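		/* Build the seed buffer as the concatenation V || key || DT. */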
2897 		memcpy(seed, template[i].v, template[i].vlen);
2898 		memcpy(seed + template[i].vlen, template[i].key,
2899 		       template[i].klen);
2900 		memcpy(seed + template[i].vlen + template[i].klen,
2901 		       template[i].dt, template[i].dtlen);
2902 
2903 		err = crypto_rng_reset(tfm, seed, seedsize);
2904 		if (err) {
2905 			printk(KERN_ERR "alg: cprng: Failed to reset rng "
2906 			       "for %s\n", algo);
2907 			goto out;
2908 		}
2909 
2910 		for (j = 0; j < template[i].loops; j++) {
2911 			err = crypto_rng_get_bytes(tfm, result,
2912 						   template[i].rlen);
2913 			if (err < 0) {
2914 				printk(KERN_ERR "alg: cprng: Failed to obtain "
2915 				       "the correct amount of random data for "
2916 				       "%s (requested %d)\n", algo,
2917 				       template[i].rlen);
2918 				goto out;
2919 			}
2920 		}
2921 
2922 		err = memcmp(result, template[i].result,
2923 			     template[i].rlen);
2924 		if (err) {
2925 			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
2926 			       i, algo);
2927 			hexdump(result, template[i].rlen);
2928 			err = -EINVAL;
2929 			goto out;
2930 		}
2931 	}
2932 
2933 out:
2934 	kfree(seed);
2935 	return err;
2936 }
2937 
2938 static int alg_test_cipher(const struct alg_test_desc *desc,
2939 			   const char *driver, u32 type, u32 mask)
2940 {
2941 	const struct cipher_test_suite *suite = &desc->suite.cipher;
2942 	struct crypto_cipher *tfm;
2943 	int err;
2944 
2945 	tfm = crypto_alloc_cipher(driver, type, mask);
2946 	if (IS_ERR(tfm)) {
2947 		printk(KERN_ERR "alg: cipher: Failed to load transform for "
2948 		       "%s: %ld\n", driver, PTR_ERR(tfm));
2949 		return PTR_ERR(tfm);
2950 	}
2951 
2952 	err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
2953 	if (!err)
2954 		err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);
2955 
2956 	crypto_free_cipher(tfm);
2957 	return err;
2958 }
2959 
2960 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
2961 			 u32 type, u32 mask)
2962 {
2963 	struct crypto_comp *comp;
2964 	struct crypto_acomp *acomp;
2965 	int err;
2966 	u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
2967 
2968 	if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
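	/* Dispatch to the acomp API or the legacy comp API, depending on the algorithm type. */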
2969 		acomp = crypto_alloc_acomp(driver, type, mask);
2970 		if (IS_ERR(acomp)) {
2971 			pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
2972 			       driver, PTR_ERR(acomp));
2973 			return PTR_ERR(acomp);
2974 		}
2975 		err = test_acomp(acomp, desc->suite.comp.comp.vecs,
2976 				 desc->suite.comp.decomp.vecs,
2977 				 desc->suite.comp.comp.count,
2978 				 desc->suite.comp.decomp.count);
2979 		crypto_free_acomp(acomp);
2980 	} else {
2981 		comp = crypto_alloc_comp(driver, type, mask);
2982 		if (IS_ERR(comp)) {
2983 			pr_err("alg: comp: Failed to load transform for %s: %ld\n",
2984 			       driver, PTR_ERR(comp));
2985 			return PTR_ERR(comp);
2986 		}
2987 
2988 		err = test_comp(comp, desc->suite.comp.comp.vecs,
2989 				desc->suite.comp.decomp.vecs,
2990 				desc->suite.comp.comp.count,
2991 				desc->suite.comp.decomp.count);
2992 
2993 		crypto_free_comp(comp);
2994 	}
2995 	return err;
2996 }
2997 
2998 static int alg_test_crc32c(const struct alg_test_desc *desc,
2999 			   const char *driver, u32 type, u32 mask)
3000 {
3001 	struct crypto_shash *tfm;
3002 	__le32 val;
3003 	int err;
3004 
3005 	err = alg_test_hash(desc, driver, type, mask);
3006 	if (err)
3007 		return err;
3008 
3009 	tfm = crypto_alloc_shash(driver, type, mask);
3010 	if (IS_ERR(tfm)) {
3011 		if (PTR_ERR(tfm) == -ENOENT) {
3012 			/*
3013 			 * This crc32c implementation is only available through
3014 			 * ahash API, not the shash API, so the remaining part
3015 			 * of the test is not applicable to it.
3016 			 */
3017 			return 0;
3018 		}
3019 		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
3020 		       "%ld\n", driver, PTR_ERR(tfm));
3021 		return PTR_ERR(tfm);
3022 	}
3023 
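	/*
	 * Seed the raw u32 CRC state directly through the shash descriptor
	 * context and check that final() returns its bitwise inverse; this
	 * verifies that the descriptor context has the expected layout.
	 */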
3024 	do {
3025 		SHASH_DESC_ON_STACK(shash, tfm);
3026 		u32 *ctx = (u32 *)shash_desc_ctx(shash);
3027 
3028 		shash->tfm = tfm;
3029 
3030 		*ctx = 420553207;
3031 		err = crypto_shash_final(shash, (u8 *)&val);
3032 		if (err) {
3033 			printk(KERN_ERR "alg: crc32c: Operation failed for "
3034 			       "%s: %d\n", driver, err);
3035 			break;
3036 		}
3037 
3038 		if (val != cpu_to_le32(~420553207)) {
3039 			pr_err("alg: crc32c: Test failed for %s: %u\n",
3040 			       driver, le32_to_cpu(val));
3041 			err = -EINVAL;
3042 		}
3043 	} while (0);
3044 
3045 	crypto_free_shash(tfm);
3046 
3047 	return err;
3048 }
3049 
3050 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
3051 			  u32 type, u32 mask)
3052 {
3053 	struct crypto_rng *rng;
3054 	int err;
3055 
3056 	rng = crypto_alloc_rng(driver, type, mask);
3057 	if (IS_ERR(rng)) {
3058 		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
3059 		       "%ld\n", driver, PTR_ERR(rng));
3060 		return PTR_ERR(rng);
3061 	}
3062 
3063 	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
3064 
3065 	crypto_free_rng(rng);
3066 
3067 	return err;
3068 }
3069 
3070 
3071 static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
3072 			  const char *driver, u32 type, u32 mask)
3073 {
3074 	int ret = -EAGAIN;
3075 	struct crypto_rng *drng;
3076 	struct drbg_test_data test_data;
3077 	struct drbg_string addtl, pers, testentropy;
3078 	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
3079 
3080 	if (!buf)
3081 		return -ENOMEM;
3082 
3083 	drng = crypto_alloc_rng(driver, type, mask);
3084 	if (IS_ERR(drng)) {
3085 		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
3086 		       "%s\n", driver);
3087 		kzfree(buf);
3088 		return -ENOMEM;
3089 	}
3090 
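	/*
	 * CAVS-style flow: instantiate with the test entropy and the
	 * personalization string, generate twice with additional input,
	 * then compare the second output against the expected value.
	 */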
3091 	test_data.testentropy = &testentropy;
3092 	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
3093 	drbg_string_fill(&pers, test->pers, test->perslen);
3094 	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
3095 	if (ret) {
3096 		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
3097 		goto outbuf;
3098 	}
3099 
3100 	drbg_string_fill(&addtl, test->addtla, test->addtllen);
3101 	if (pr) {
3102 		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
3103 		ret = crypto_drbg_get_bytes_addtl_test(drng,
3104 			buf, test->expectedlen, &addtl,	&test_data);
3105 	} else {
3106 		ret = crypto_drbg_get_bytes_addtl(drng,
3107 			buf, test->expectedlen, &addtl);
3108 	}
3109 	if (ret < 0) {
3110 		printk(KERN_ERR "alg: drbg: could not obtain random data for "
3111 		       "driver %s\n", driver);
3112 		goto outbuf;
3113 	}
3114 
3115 	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
3116 	if (pr) {
3117 		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
3118 		ret = crypto_drbg_get_bytes_addtl_test(drng,
3119 			buf, test->expectedlen, &addtl, &test_data);
3120 	} else {
3121 		ret = crypto_drbg_get_bytes_addtl(drng,
3122 			buf, test->expectedlen, &addtl);
3123 	}
3124 	if (ret < 0) {
3125 		printk(KERN_ERR "alg: drbg: could not obtain random data for "
3126 		       "driver %s\n", driver);
3127 		goto outbuf;
3128 	}
3129 
3130 	ret = memcmp(test->expected, buf, test->expectedlen);
3131 
3132 outbuf:
3133 	crypto_free_rng(drng);
3134 	kzfree(buf);
3135 	return ret;
3136 }
3137 
3138 
3139 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
3140 			 u32 type, u32 mask)
3141 {
3142 	int err = 0;
3143 	int pr = 0;
3144 	int i = 0;
3145 	const struct drbg_testvec *template = desc->suite.drbg.vecs;
3146 	unsigned int tcount = desc->suite.drbg.count;
3147 
3148 	if (memcmp(driver, "drbg_pr_", 8) == 0)
3149 		pr = 1;
3150 
3151 	for (i = 0; i < tcount; i++) {
3152 		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
3153 		if (err) {
3154 			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
3155 			       i, driver);
3156 			err = -EINVAL;
3157 			break;
3158 		}
3159 	}
3160 	return err;
3161 
3163 
3164 static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
3165 		       const char *alg)
3166 {
3167 	struct kpp_request *req;
3168 	void *input_buf = NULL;
3169 	void *output_buf = NULL;
3170 	void *a_public = NULL;
3171 	void *a_ss = NULL;
3172 	void *shared_secret = NULL;
3173 	struct crypto_wait wait;
3174 	unsigned int out_len_max;
3175 	int err = -ENOMEM;
3176 	struct scatterlist src, dst;
3177 
3178 	req = kpp_request_alloc(tfm, GFP_KERNEL);
3179 	if (!req)
3180 		return err;
3181 
3182 	crypto_init_wait(&wait);
3183 
3184 	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
3185 	if (err < 0)
3186 		goto free_req;
3187 
3188 	out_len_max = crypto_kpp_maxsize(tfm);
3189 	output_buf = kzalloc(out_len_max, GFP_KERNEL);
3190 	if (!output_buf) {
3191 		err = -ENOMEM;
3192 		goto free_req;
3193 	}
3194 
3195 	/* No input is needed to generate party A's public key, only the secret set above. */
3196 	kpp_request_set_input(req, NULL, 0);
3197 	sg_init_one(&dst, output_buf, out_len_max);
3198 	kpp_request_set_output(req, &dst, out_len_max);
3199 	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
3200 				 crypto_req_done, &wait);
3201 
3202 	/* Compute party A's public key */
3203 	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
3204 	if (err) {
3205 		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
3206 		       alg, err);
3207 		goto free_output;
3208 	}
3209 
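	/*
	 * 'genkey' vectors have no fixed expected public key: save party A's
	 * generated key and later check that both parties derive the same
	 * shared secret.
	 */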
3210 	if (vec->genkey) {
3211 		/* Save party A's public key */
3212 		a_public = kmemdup(sg_virt(req->dst), out_len_max, GFP_KERNEL);
3213 		if (!a_public) {
3214 			err = -ENOMEM;
3215 			goto free_output;
3216 		}
3217 	} else {
3218 		/* Verify calculated public key */
3219 		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
3220 			   vec->expected_a_public_size)) {
3221 			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
3222 			       alg);
3223 			err = -EINVAL;
3224 			goto free_output;
3225 		}
3226 	}
3227 
3228 	/* Calculate the shared secret by using the counterpart's (party B's) public key. */
3229 	input_buf = kmemdup(vec->b_public, vec->b_public_size, GFP_KERNEL);
3230 	if (!input_buf) {
3231 		err = -ENOMEM;
3232 		goto free_output;
3233 	}
3234 
3235 	sg_init_one(&src, input_buf, vec->b_public_size);
3236 	sg_init_one(&dst, output_buf, out_len_max);
3237 	kpp_request_set_input(req, &src, vec->b_public_size);
3238 	kpp_request_set_output(req, &dst, out_len_max);
3239 	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
3240 				 crypto_req_done, &wait);
3241 	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
3242 	if (err) {
3243 		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
3244 		       alg, err);
3245 		goto free_all;
3246 	}
3247 
3248 	if (vec->genkey) {
3249 		/* Save the shared secret obtained by party A */
3250 		a_ss = kmemdup(sg_virt(req->dst), vec->expected_ss_size, GFP_KERNEL);
3251 		if (!a_ss) {
3252 			err = -ENOMEM;
3253 			goto free_all;
3254 		}
3255 
3256 		/*
3257 		 * Calculate party B's shared secret by using party A's
3258 		 * public key.
3259 		 */
3260 		err = crypto_kpp_set_secret(tfm, vec->b_secret,
3261 					    vec->b_secret_size);
3262 		if (err < 0)
3263 			goto free_all;
3264 
3265 		sg_init_one(&src, a_public, vec->expected_a_public_size);
3266 		sg_init_one(&dst, output_buf, out_len_max);
3267 		kpp_request_set_input(req, &src, vec->expected_a_public_size);
3268 		kpp_request_set_output(req, &dst, out_len_max);
3269 		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
3270 					 crypto_req_done, &wait);
3271 		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
3272 				      &wait);
3273 		if (err) {
3274 			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
3275 			       alg, err);
3276 			goto free_all;
3277 		}
3278 
3279 		shared_secret = a_ss;
3280 	} else {
3281 		shared_secret = (void *)vec->expected_ss;
3282 	}
3283 
3284 	/*
3285 	 * Verify the shared secret, from which the user will derive the
3286 	 * secret key by executing whatever hash they have chosen.
3287 	 */
3288 	if (memcmp(shared_secret, sg_virt(req->dst),
3289 		   vec->expected_ss_size)) {
3290 		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
3291 		       alg);
3292 		err = -EINVAL;
3293 	}
3294 
3295 free_all:
3296 	kfree(a_ss);
3297 	kfree(input_buf);
3298 free_output:
3299 	kfree(a_public);
3300 	kfree(output_buf);
3301 free_req:
3302 	kpp_request_free(req);
3303 	return err;
3304 }
3305 
3306 static int test_kpp(struct crypto_kpp *tfm, const char *alg,
3307 		    const struct kpp_testvec *vecs, unsigned int tcount)
3308 {
3309 	int ret, i;
3310 
3311 	for (i = 0; i < tcount; i++) {
3312 		ret = do_test_kpp(tfm, vecs++, alg);
3313 		if (ret) {
3314 			pr_err("alg: %s: test failed on vector %d, err=%d\n",
3315 			       alg, i + 1, ret);
3316 			return ret;
3317 		}
3318 	}
3319 	return 0;
3320 }
3321 
3322 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
3323 			u32 type, u32 mask)
3324 {
3325 	struct crypto_kpp *tfm;
3326 	int err = 0;
3327 
3328 	tfm = crypto_alloc_kpp(driver, type, mask);
3329 	if (IS_ERR(tfm)) {
3330 		pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
3331 		       driver, PTR_ERR(tfm));
3332 		return PTR_ERR(tfm);
3333 	}
3334 	if (desc->suite.kpp.vecs)
3335 		err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
3336 			       desc->suite.kpp.count);
3337 
3338 	crypto_free_kpp(tfm);
3339 	return err;
3340 }
3341 
3342 static u8 *test_pack_u32(u8 *dst, u32 val)
3343 {
3344 	memcpy(dst, &val, sizeof(val));
3345 	return dst + sizeof(val);
3346 }
3347 
3348 static int test_akcipher_one(struct crypto_akcipher *tfm,
3349 			     const struct akcipher_testvec *vecs)
3350 {
3351 	char *xbuf[XBUFSIZE];
3352 	struct akcipher_request *req;
3353 	void *outbuf_enc = NULL;
3354 	void *outbuf_dec = NULL;
3355 	struct crypto_wait wait;
3356 	unsigned int out_len_max, out_len = 0;
3357 	int err = -ENOMEM;
3358 	struct scatterlist src, dst, src_tab[3];
3359 	const char *m, *c;
3360 	unsigned int m_size, c_size;
3361 	const char *op;
3362 	u8 *key, *ptr;
3363 
3364 	if (testmgr_alloc_buf(xbuf))
3365 		return err;
3366 
3367 	req = akcipher_request_alloc(tfm, GFP_KERNEL);
3368 	if (!req)
3369 		goto free_xbuf;
3370 
3371 	crypto_init_wait(&wait);
3372 
3373 	key = kmalloc(vecs->key_len + sizeof(u32) * 2 + vecs->param_len,
3374 		      GFP_KERNEL);
3375 	if (!key)
3376 		goto free_xbuf;
3377 	memcpy(key, vecs->key, vecs->key_len);
3378 	ptr = key + vecs->key_len;
3379 	ptr = test_pack_u32(ptr, vecs->algo);
3380 	ptr = test_pack_u32(ptr, vecs->param_len);
3381 	memcpy(ptr, vecs->params, vecs->param_len);
3382 
3383 	if (vecs->public_key_vec)
3384 		err = crypto_akcipher_set_pub_key(tfm, key, vecs->key_len);
3385 	else
3386 		err = crypto_akcipher_set_priv_key(tfm, key, vecs->key_len);
3387 	if (err)
3388 		goto free_req;
3389 
3390 	/*
3391 	 * First run the tests which do not require a private key, such as
3392 	 * encrypt or verify.
3393 	 */
3394 	err = -ENOMEM;
3395 	out_len_max = crypto_akcipher_maxsize(tfm);
3396 	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
3397 	if (!outbuf_enc)
3398 		goto free_req;
3399 
3400 	if (!vecs->siggen_sigver_test) {
3401 		m = vecs->m;
3402 		m_size = vecs->m_size;
3403 		c = vecs->c;
3404 		c_size = vecs->c_size;
3405 		op = "encrypt";
3406 	} else {
3407 		/* Swap args so we can keep the plaintext (digest)
3408 		 * in vecs->m, and the cooked signature in vecs->c.
3409 		 */
3410 		m = vecs->c; /* signature */
3411 		m_size = vecs->c_size;
3412 		c = vecs->m; /* digest */
3413 		c_size = vecs->m_size;
3414 		op = "verify";
3415 	}
3416 
3417 	if (WARN_ON(m_size > PAGE_SIZE))
3418 		goto free_all;
3419 	memcpy(xbuf[0], m, m_size);
3420 
3421 	sg_init_table(src_tab, 3);
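	/* Split the message into an 8-byte head and the rest, so src spans two SG entries. */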
3422 	sg_set_buf(&src_tab[0], xbuf[0], 8);
3423 	sg_set_buf(&src_tab[1], xbuf[0] + 8, m_size - 8);
3424 	if (vecs->siggen_sigver_test) {
3425 		if (WARN_ON(c_size > PAGE_SIZE))
3426 			goto free_all;
3427 		memcpy(xbuf[1], c, c_size);
3428 		sg_set_buf(&src_tab[2], xbuf[1], c_size);
3429 		akcipher_request_set_crypt(req, src_tab, NULL, m_size, c_size);
3430 	} else {
3431 		sg_init_one(&dst, outbuf_enc, out_len_max);
3432 		akcipher_request_set_crypt(req, src_tab, &dst, m_size,
3433 					   out_len_max);
3434 	}
3435 	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
3436 				      crypto_req_done, &wait);
3437 
3438 	err = crypto_wait_req(vecs->siggen_sigver_test ?
3439 			      /* Run asymmetric signature verification */
3440 			      crypto_akcipher_verify(req) :
3441 			      /* Run asymmetric encrypt */
3442 			      crypto_akcipher_encrypt(req), &wait);
3443 	if (err) {
3444 		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
3445 		goto free_all;
3446 	}
3447 	if (!vecs->siggen_sigver_test) {
3448 		if (req->dst_len != c_size) {
3449 			pr_err("alg: akcipher: %s test failed. Invalid output len\n",
3450 			       op);
3451 			err = -EINVAL;
3452 			goto free_all;
3453 		}
3454 		/* verify that the encrypted message is equal to the expected ciphertext */
3455 		if (memcmp(c, outbuf_enc, c_size) != 0) {
3456 			pr_err("alg: akcipher: %s test failed. Invalid output\n",
3457 			       op);
3458 			hexdump(outbuf_enc, c_size);
3459 			err = -EINVAL;
3460 			goto free_all;
3461 		}
3462 	}
3463 
3464 	/*
3465 	 * Don't invoke the tests which require a private key (decrypt or
3466 	 * sign) for vectors that only have a public key.
3467 	 */
3468 	if (vecs->public_key_vec) {
3469 		err = 0;
3470 		goto free_all;
3471 	}
3472 	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
3473 	if (!outbuf_dec) {
3474 		err = -ENOMEM;
3475 		goto free_all;
3476 	}
3477 
3478 	op = vecs->siggen_sigver_test ? "sign" : "decrypt";
3479 	if (WARN_ON(c_size > PAGE_SIZE))
3480 		goto free_all;
3481 	memcpy(xbuf[0], c, c_size);
3482 
3483 	sg_init_one(&src, xbuf[0], c_size);
3484 	sg_init_one(&dst, outbuf_dec, out_len_max);
3485 	crypto_init_wait(&wait);
3486 	akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max);
3487 
3488 	err = crypto_wait_req(vecs->siggen_sigver_test ?
3489 			      /* Run asymmetric signature generation */
3490 			      crypto_akcipher_sign(req) :
3491 			      /* Run asymmetric decrypt */
3492 			      crypto_akcipher_decrypt(req), &wait);
3493 	if (err) {
3494 		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
3495 		goto free_all;
3496 	}
3497 	out_len = req->dst_len;
3498 	if (out_len < m_size) {
3499 		pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
3500 		       op, out_len);
3501 		err = -EINVAL;
3502 		goto free_all;
3503 	}
3504 	/*
	 * Verify the decrypted message: the leading out_len - m_size bytes
	 * must be all-zero padding and the tail must equal the original
	 * message.
	 */
3505 	if (memchr_inv(outbuf_dec, 0, out_len - m_size) ||
3506 	    memcmp(m, outbuf_dec + out_len - m_size, m_size)) {
3507 		pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
3508 		hexdump(outbuf_dec, out_len);
3509 		err = -EINVAL;
3510 	}
3511 free_all:
3512 	kfree(outbuf_dec);
3513 	kfree(outbuf_enc);
3514 free_req:
3515 	akcipher_request_free(req);
3516 	kfree(key);
3517 free_xbuf:
3518 	testmgr_free_buf(xbuf);
3519 	return err;
3520 }
3521 
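/*
 * Run test_akcipher_one() over every vector in the template and stop at the
 * first failure, reporting the driver name and the 1-based vector index.
 */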
3522 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
3523 			 const struct akcipher_testvec *vecs,
3524 			 unsigned int tcount)
3525 {
3526 	const char *algo =
3527 		crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
3528 	int ret, i;
3529 
3530 	for (i = 0; i < tcount; i++) {
3531 		ret = test_akcipher_one(tfm, vecs++);
3532 		if (!ret)
3533 			continue;
3534 
3535 		pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
3536 		       i + 1, algo, ret);
3537 		return ret;
3538 	}
3539 	return 0;
3540 }
3541 
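/*
 * Allocate an akcipher transform for the driver under test, run its test
 * vectors (if any), and release the transform again.
 */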
3542 static int alg_test_akcipher(const struct alg_test_desc *desc,
3543 			     const char *driver, u32 type, u32 mask)
3544 {
3545 	struct crypto_akcipher *tfm;
3546 	int err = 0;
3547 
3548 	tfm = crypto_alloc_akcipher(driver, type, mask);
3549 	if (IS_ERR(tfm)) {
3550 		pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
3551 		       driver, PTR_ERR(tfm));
3552 		return PTR_ERR(tfm);
3553 	}
3554 	if (desc->suite.akcipher.vecs)
3555 		err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
3556 				    desc->suite.akcipher.count);
3557 
3558 	crypto_free_akcipher(tfm);
3559 	return err;
3560 }
3561 
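/*
 * Placeholder for algorithms that are covered by another entry or need no
 * test vectors of their own; always reports success.
 */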
3562 static int alg_test_null(const struct alg_test_desc *desc,
3563 			     const char *driver, u32 type, u32 mask)
3564 {
3565 	return 0;
3566 }
3567 
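/* Point a test suite at a vector template and record its length. */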
3568 #define __VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }
3569 
3570 /* Please keep this list sorted by algorithm name. */
3571 static const struct alg_test_desc alg_test_descs[] = {
3572 	{
3573 		.alg = "adiantum(xchacha12,aes)",
3574 		.generic_driver = "adiantum(xchacha12-generic,aes-generic,nhpoly1305-generic)",
3575 		.test = alg_test_skcipher,
3576 		.suite = {
3577 			.cipher = __VECS(adiantum_xchacha12_aes_tv_template)
3578 		},
3579 	}, {
3580 		.alg = "adiantum(xchacha20,aes)",
3581 		.generic_driver = "adiantum(xchacha20-generic,aes-generic,nhpoly1305-generic)",
3582 		.test = alg_test_skcipher,
3583 		.suite = {
3584 			.cipher = __VECS(adiantum_xchacha20_aes_tv_template)
3585 		},
3586 	}, {
3587 		.alg = "aegis128",
3588 		.test = alg_test_aead,
3589 		.suite = {
3590 			.aead = __VECS(aegis128_tv_template)
3591 		}
3592 	}, {
3593 		.alg = "aegis128l",
3594 		.test = alg_test_aead,
3595 		.suite = {
3596 			.aead = __VECS(aegis128l_tv_template)
3597 		}
3598 	}, {
3599 		.alg = "aegis256",
3600 		.test = alg_test_aead,
3601 		.suite = {
3602 			.aead = __VECS(aegis256_tv_template)
3603 		}
3604 	}, {
3605 		.alg = "ansi_cprng",
3606 		.test = alg_test_cprng,
3607 		.suite = {
3608 			.cprng = __VECS(ansi_cprng_aes_tv_template)
3609 		}
3610 	}, {
3611 		.alg = "authenc(hmac(md5),ecb(cipher_null))",
3612 		.test = alg_test_aead,
3613 		.suite = {
3614 			.aead = __VECS(hmac_md5_ecb_cipher_null_tv_template)
3615 		}
3616 	}, {
3617 		.alg = "authenc(hmac(sha1),cbc(aes))",
3618 		.test = alg_test_aead,
3619 		.fips_allowed = 1,
3620 		.suite = {
3621 			.aead = __VECS(hmac_sha1_aes_cbc_tv_temp)
3622 		}
3623 	}, {
3624 		.alg = "authenc(hmac(sha1),cbc(des))",
3625 		.test = alg_test_aead,
3626 		.suite = {
3627 			.aead = __VECS(hmac_sha1_des_cbc_tv_temp)
3628 		}
3629 	}, {
3630 		.alg = "authenc(hmac(sha1),cbc(des3_ede))",
3631 		.test = alg_test_aead,
3632 		.fips_allowed = 1,
3633 		.suite = {
3634 			.aead = __VECS(hmac_sha1_des3_ede_cbc_tv_temp)
3635 		}
3636 	}, {
3637 		.alg = "authenc(hmac(sha1),ctr(aes))",
3638 		.test = alg_test_null,
3639 		.fips_allowed = 1,
3640 	}, {
3641 		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
3642 		.test = alg_test_aead,
3643 		.suite = {
3644 			.aead = __VECS(hmac_sha1_ecb_cipher_null_tv_temp)
3645 		}
3646 	}, {
3647 		.alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
3648 		.test = alg_test_null,
3649 		.fips_allowed = 1,
3650 	}, {
3651 		.alg = "authenc(hmac(sha224),cbc(des))",
3652 		.test = alg_test_aead,
3653 		.suite = {
3654 			.aead = __VECS(hmac_sha224_des_cbc_tv_temp)
3655 		}
3656 	}, {
3657 		.alg = "authenc(hmac(sha224),cbc(des3_ede))",
3658 		.test = alg_test_aead,
3659 		.fips_allowed = 1,
3660 		.suite = {
3661 			.aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp)
3662 		}
3663 	}, {
3664 		.alg = "authenc(hmac(sha256),cbc(aes))",
3665 		.test = alg_test_aead,
3666 		.fips_allowed = 1,
3667 		.suite = {
3668 			.aead = __VECS(hmac_sha256_aes_cbc_tv_temp)
3669 		}
3670 	}, {
3671 		.alg = "authenc(hmac(sha256),cbc(des))",
3672 		.test = alg_test_aead,
3673 		.suite = {
3674 			.aead = __VECS(hmac_sha256_des_cbc_tv_temp)
3675 		}
3676 	}, {
3677 		.alg = "authenc(hmac(sha256),cbc(des3_ede))",
3678 		.test = alg_test_aead,
3679 		.fips_allowed = 1,
3680 		.suite = {
3681 			.aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp)
3682 		}
3683 	}, {
3684 		.alg = "authenc(hmac(sha256),ctr(aes))",
3685 		.test = alg_test_null,
3686 		.fips_allowed = 1,
3687 	}, {
3688 		.alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
3689 		.test = alg_test_null,
3690 		.fips_allowed = 1,
3691 	}, {
3692 		.alg = "authenc(hmac(sha384),cbc(des))",
3693 		.test = alg_test_aead,
3694 		.suite = {
3695 			.aead = __VECS(hmac_sha384_des_cbc_tv_temp)
3696 		}
3697 	}, {
3698 		.alg = "authenc(hmac(sha384),cbc(des3_ede))",
3699 		.test = alg_test_aead,
3700 		.fips_allowed = 1,
3701 		.suite = {
3702 			.aead = __VECS(hmac_sha384_des3_ede_cbc_tv_temp)
3703 		}
3704 	}, {
3705 		.alg = "authenc(hmac(sha384),ctr(aes))",
3706 		.test = alg_test_null,
3707 		.fips_allowed = 1,
3708 	}, {
3709 		.alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
3710 		.test = alg_test_null,
3711 		.fips_allowed = 1,
3712 	}, {
3713 		.alg = "authenc(hmac(sha512),cbc(aes))",
3714 		.fips_allowed = 1,
3715 		.test = alg_test_aead,
3716 		.suite = {
3717 			.aead = __VECS(hmac_sha512_aes_cbc_tv_temp)
3718 		}
3719 	}, {
3720 		.alg = "authenc(hmac(sha512),cbc(des))",
3721 		.test = alg_test_aead,
3722 		.suite = {
3723 			.aead = __VECS(hmac_sha512_des_cbc_tv_temp)
3724 		}
3725 	}, {
3726 		.alg = "authenc(hmac(sha512),cbc(des3_ede))",
3727 		.test = alg_test_aead,
3728 		.fips_allowed = 1,
3729 		.suite = {
3730 			.aead = __VECS(hmac_sha512_des3_ede_cbc_tv_temp)
3731 		}
3732 	}, {
3733 		.alg = "authenc(hmac(sha512),ctr(aes))",
3734 		.test = alg_test_null,
3735 		.fips_allowed = 1,
3736 	}, {
3737 		.alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
3738 		.test = alg_test_null,
3739 		.fips_allowed = 1,
3740 	}, {
3741 		.alg = "cbc(aes)",
3742 		.test = alg_test_skcipher,
3743 		.fips_allowed = 1,
3744 		.suite = {
3745 			.cipher = __VECS(aes_cbc_tv_template)
3746 		},
3747 	}, {
3748 		.alg = "cbc(anubis)",
3749 		.test = alg_test_skcipher,
3750 		.suite = {
3751 			.cipher = __VECS(anubis_cbc_tv_template)
3752 		},
3753 	}, {
3754 		.alg = "cbc(blowfish)",
3755 		.test = alg_test_skcipher,
3756 		.suite = {
3757 			.cipher = __VECS(bf_cbc_tv_template)
3758 		},
3759 	}, {
3760 		.alg = "cbc(camellia)",
3761 		.test = alg_test_skcipher,
3762 		.suite = {
3763 			.cipher = __VECS(camellia_cbc_tv_template)
3764 		},
3765 	}, {
3766 		.alg = "cbc(cast5)",
3767 		.test = alg_test_skcipher,
3768 		.suite = {
3769 			.cipher = __VECS(cast5_cbc_tv_template)
3770 		},
3771 	}, {
3772 		.alg = "cbc(cast6)",
3773 		.test = alg_test_skcipher,
3774 		.suite = {
3775 			.cipher = __VECS(cast6_cbc_tv_template)
3776 		},
3777 	}, {
3778 		.alg = "cbc(des)",
3779 		.test = alg_test_skcipher,
3780 		.suite = {
3781 			.cipher = __VECS(des_cbc_tv_template)
3782 		},
3783 	}, {
3784 		.alg = "cbc(des3_ede)",
3785 		.test = alg_test_skcipher,
3786 		.fips_allowed = 1,
3787 		.suite = {
3788 			.cipher = __VECS(des3_ede_cbc_tv_template)
3789 		},
3790 	}, {
3791 		/* Same as cbc(aes) except the key is stored in
3792 		 * hardware secure memory which we reference by index
3793 		 */
3794 		.alg = "cbc(paes)",
3795 		.test = alg_test_null,
3796 		.fips_allowed = 1,
3797 	}, {
3798 		/* Same as cbc(sm4) except the key is stored in
3799 		 * hardware secure memory which we reference by index
3800 		 */
3801 		.alg = "cbc(psm4)",
3802 		.test = alg_test_null,
3803 	}, {
3804 		.alg = "cbc(serpent)",
3805 		.test = alg_test_skcipher,
3806 		.suite = {
3807 			.cipher = __VECS(serpent_cbc_tv_template)
3808 		},
3809 	}, {
3810 		.alg = "cbc(sm4)",
3811 		.test = alg_test_skcipher,
3812 		.suite = {
3813 			.cipher = __VECS(sm4_cbc_tv_template)
3814 		}
3815 	}, {
3816 		.alg = "cbc(twofish)",
3817 		.test = alg_test_skcipher,
3818 		.suite = {
3819 			.cipher = __VECS(tf_cbc_tv_template)
3820 		},
3821 	}, {
3822 		.alg = "cbcmac(aes)",
3823 		.fips_allowed = 1,
3824 		.test = alg_test_hash,
3825 		.suite = {
3826 			.hash = __VECS(aes_cbcmac_tv_template)
3827 		}
3828 	}, {
3829 		.alg = "ccm(aes)",
3830 		.generic_driver = "ccm_base(ctr(aes-generic),cbcmac(aes-generic))",
3831 		.test = alg_test_aead,
3832 		.fips_allowed = 1,
3833 		.suite = {
3834 			.aead = __VECS(aes_ccm_tv_template)
3835 		}
3836 	}, {
3837 		.alg = "cfb(aes)",
3838 		.test = alg_test_skcipher,
3839 		.fips_allowed = 1,
3840 		.suite = {
3841 			.cipher = __VECS(aes_cfb_tv_template)
3842 		},
3843 	}, {
3844 		.alg = "chacha20",
3845 		.test = alg_test_skcipher,
3846 		.suite = {
3847 			.cipher = __VECS(chacha20_tv_template)
3848 		},
3849 	}, {
3850 		.alg = "cmac(aes)",
3851 		.fips_allowed = 1,
3852 		.test = alg_test_hash,
3853 		.suite = {
3854 			.hash = __VECS(aes_cmac128_tv_template)
3855 		}
3856 	}, {
3857 		.alg = "cmac(des3_ede)",
3858 		.fips_allowed = 1,
3859 		.test = alg_test_hash,
3860 		.suite = {
3861 			.hash = __VECS(des3_ede_cmac64_tv_template)
3862 		}
3863 	}, {
3864 		.alg = "compress_null",
3865 		.test = alg_test_null,
3866 	}, {
3867 		.alg = "crc32",
3868 		.test = alg_test_hash,
3869 		.fips_allowed = 1,
3870 		.suite = {
3871 			.hash = __VECS(crc32_tv_template)
3872 		}
3873 	}, {
3874 		.alg = "crc32c",
3875 		.test = alg_test_crc32c,
3876 		.fips_allowed = 1,
3877 		.suite = {
3878 			.hash = __VECS(crc32c_tv_template)
3879 		}
3880 	}, {
3881 		.alg = "crct10dif",
3882 		.test = alg_test_hash,
3883 		.fips_allowed = 1,
3884 		.suite = {
3885 			.hash = __VECS(crct10dif_tv_template)
3886 		}
3887 	}, {
3888 		.alg = "ctr(aes)",
3889 		.test = alg_test_skcipher,
3890 		.fips_allowed = 1,
3891 		.suite = {
3892 			.cipher = __VECS(aes_ctr_tv_template)
3893 		}
3894 	}, {
3895 		.alg = "ctr(blowfish)",
3896 		.test = alg_test_skcipher,
3897 		.suite = {
3898 			.cipher = __VECS(bf_ctr_tv_template)
3899 		}
3900 	}, {
3901 		.alg = "ctr(camellia)",
3902 		.test = alg_test_skcipher,
3903 		.suite = {
3904 			.cipher = __VECS(camellia_ctr_tv_template)
3905 		}
3906 	}, {
3907 		.alg = "ctr(cast5)",
3908 		.test = alg_test_skcipher,
3909 		.suite = {
3910 			.cipher = __VECS(cast5_ctr_tv_template)
3911 		}
3912 	}, {
3913 		.alg = "ctr(cast6)",
3914 		.test = alg_test_skcipher,
3915 		.suite = {
3916 			.cipher = __VECS(cast6_ctr_tv_template)
3917 		}
3918 	}, {
3919 		.alg = "ctr(des)",
3920 		.test = alg_test_skcipher,
3921 		.suite = {
3922 			.cipher = __VECS(des_ctr_tv_template)
3923 		}
3924 	}, {
3925 		.alg = "ctr(des3_ede)",
3926 		.test = alg_test_skcipher,
3927 		.fips_allowed = 1,
3928 		.suite = {
3929 			.cipher = __VECS(des3_ede_ctr_tv_template)
3930 		}
3931 	}, {
3932 		/* Same as ctr(aes) except the key is stored in
3933 		 * hardware secure memory which we reference by index
3934 		 */
3935 		.alg = "ctr(paes)",
3936 		.test = alg_test_null,
3937 		.fips_allowed = 1,
3938 	}, {
3940 		/* Same as ctr(sm4) except the key is stored in
3941 		 * hardware secure memory which we reference by index
3942 		 */
3943 		.alg = "ctr(psm4)",
3944 		.test = alg_test_null,
3945 	}, {
3946 		.alg = "ctr(serpent)",
3947 		.test = alg_test_skcipher,
3948 		.suite = {
3949 			.cipher = __VECS(serpent_ctr_tv_template)
3950 		}
3951 	}, {
3952 		.alg = "ctr(sm4)",
3953 		.test = alg_test_skcipher,
3954 		.suite = {
3955 			.cipher = __VECS(sm4_ctr_tv_template)
3956 		}
3957 	}, {
3958 		.alg = "ctr(twofish)",
3959 		.test = alg_test_skcipher,
3960 		.suite = {
3961 			.cipher = __VECS(tf_ctr_tv_template)
3962 		}
3963 	}, {
3964 		.alg = "cts(cbc(aes))",
3965 		.test = alg_test_skcipher,
3966 		.fips_allowed = 1,
3967 		.suite = {
3968 			.cipher = __VECS(cts_mode_tv_template)
3969 		}
3970 	}, {
3971 		/* Same as cts(cbc(aes)) except the key is stored in
3972 		 * hardware secure memory which we reference by index
3973 		 */
3974 		.alg = "cts(cbc(paes))",
3975 		.test = alg_test_null,
3976 		.fips_allowed = 1,
3977 	}, {
3978 		.alg = "deflate",
3979 		.test = alg_test_comp,
3980 		.fips_allowed = 1,
3981 		.suite = {
3982 			.comp = {
3983 				.comp = __VECS(deflate_comp_tv_template),
3984 				.decomp = __VECS(deflate_decomp_tv_template)
3985 			}
3986 		}
3987 	}, {
3988 		.alg = "dh",
3989 		.test = alg_test_kpp,
3990 		.fips_allowed = 1,
3991 		.suite = {
3992 			.kpp = __VECS(dh_tv_template)
3993 		}
3994 	}, {
3995 		.alg = "digest_null",
3996 		.test = alg_test_null,
3997 	}, {
3998 		.alg = "drbg_nopr_ctr_aes128",
3999 		.test = alg_test_drbg,
4000 		.fips_allowed = 1,
4001 		.suite = {
4002 			.drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
4003 		}
4004 	}, {
4005 		.alg = "drbg_nopr_ctr_aes192",
4006 		.test = alg_test_drbg,
4007 		.fips_allowed = 1,
4008 		.suite = {
4009 			.drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
4010 		}
4011 	}, {
4012 		.alg = "drbg_nopr_ctr_aes256",
4013 		.test = alg_test_drbg,
4014 		.fips_allowed = 1,
4015 		.suite = {
4016 			.drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
4017 		}
4018 	}, {
4019 		/*
4020 		 * There is no need to specifically test the DRBG with every
4021 		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
4022 		 */
4023 		.alg = "drbg_nopr_hmac_sha1",
4024 		.fips_allowed = 1,
4025 		.test = alg_test_null,
4026 	}, {
4027 		.alg = "drbg_nopr_hmac_sha256",
4028 		.test = alg_test_drbg,
4029 		.fips_allowed = 1,
4030 		.suite = {
4031 			.drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
4032 		}
4033 	}, {
4034 		/* covered by drbg_nopr_hmac_sha256 test */
4035 		.alg = "drbg_nopr_hmac_sha384",
4036 		.fips_allowed = 1,
4037 		.test = alg_test_null,
4038 	}, {
4039 		.alg = "drbg_nopr_hmac_sha512",
4040 		.test = alg_test_null,
4041 		.fips_allowed = 1,
4042 	}, {
4043 		.alg = "drbg_nopr_sha1",
4044 		.fips_allowed = 1,
4045 		.test = alg_test_null,
4046 	}, {
4047 		.alg = "drbg_nopr_sha256",
4048 		.test = alg_test_drbg,
4049 		.fips_allowed = 1,
4050 		.suite = {
4051 			.drbg = __VECS(drbg_nopr_sha256_tv_template)
4052 		}
4053 	}, {
4054 		/* covered by drbg_nopr_sha256 test */
4055 		.alg = "drbg_nopr_sha384",
4056 		.fips_allowed = 1,
4057 		.test = alg_test_null,
4058 	}, {
4059 		.alg = "drbg_nopr_sha512",
4060 		.fips_allowed = 1,
4061 		.test = alg_test_null,
4062 	}, {
4063 		.alg = "drbg_pr_ctr_aes128",
4064 		.test = alg_test_drbg,
4065 		.fips_allowed = 1,
4066 		.suite = {
4067 			.drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
4068 		}
4069 	}, {
4070 		/* covered by drbg_pr_ctr_aes128 test */
4071 		.alg = "drbg_pr_ctr_aes192",
4072 		.fips_allowed = 1,
4073 		.test = alg_test_null,
4074 	}, {
4075 		.alg = "drbg_pr_ctr_aes256",
4076 		.fips_allowed = 1,
4077 		.test = alg_test_null,
4078 	}, {
4079 		.alg = "drbg_pr_hmac_sha1",
4080 		.fips_allowed = 1,
4081 		.test = alg_test_null,
4082 	}, {
4083 		.alg = "drbg_pr_hmac_sha256",
4084 		.test = alg_test_drbg,
4085 		.fips_allowed = 1,
4086 		.suite = {
4087 			.drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
4088 		}
4089 	}, {
4090 		/* covered by drbg_pr_hmac_sha256 test */
4091 		.alg = "drbg_pr_hmac_sha384",
4092 		.fips_allowed = 1,
4093 		.test = alg_test_null,
4094 	}, {
4095 		.alg = "drbg_pr_hmac_sha512",
4096 		.test = alg_test_null,
4097 		.fips_allowed = 1,
4098 	}, {
4099 		.alg = "drbg_pr_sha1",
4100 		.fips_allowed = 1,
4101 		.test = alg_test_null,
4102 	}, {
4103 		.alg = "drbg_pr_sha256",
4104 		.test = alg_test_drbg,
4105 		.fips_allowed = 1,
4106 		.suite = {
4107 			.drbg = __VECS(drbg_pr_sha256_tv_template)
4108 		}
4109 	}, {
4110 		/* covered by drbg_pr_sha256 test */
4111 		.alg = "drbg_pr_sha384",
4112 		.fips_allowed = 1,
4113 		.test = alg_test_null,
4114 	}, {
4115 		.alg = "drbg_pr_sha512",
4116 		.fips_allowed = 1,
4117 		.test = alg_test_null,
4118 	}, {
4119 		.alg = "ecb(aes)",
4120 		.test = alg_test_skcipher,
4121 		.fips_allowed = 1,
4122 		.suite = {
4123 			.cipher = __VECS(aes_tv_template)
4124 		}
4125 	}, {
4126 		.alg = "ecb(anubis)",
4127 		.test = alg_test_skcipher,
4128 		.suite = {
4129 			.cipher = __VECS(anubis_tv_template)
4130 		}
4131 	}, {
4132 		.alg = "ecb(arc4)",
4133 		.test = alg_test_skcipher,
4134 		.suite = {
4135 			.cipher = __VECS(arc4_tv_template)
4136 		}
4137 	}, {
4138 		.alg = "ecb(blowfish)",
4139 		.test = alg_test_skcipher,
4140 		.suite = {
4141 			.cipher = __VECS(bf_tv_template)
4142 		}
4143 	}, {
4144 		.alg = "ecb(camellia)",
4145 		.test = alg_test_skcipher,
4146 		.suite = {
4147 			.cipher = __VECS(camellia_tv_template)
4148 		}
4149 	}, {
4150 		.alg = "ecb(cast5)",
4151 		.test = alg_test_skcipher,
4152 		.suite = {
4153 			.cipher = __VECS(cast5_tv_template)
4154 		}
4155 	}, {
4156 		.alg = "ecb(cast6)",
4157 		.test = alg_test_skcipher,
4158 		.suite = {
4159 			.cipher = __VECS(cast6_tv_template)
4160 		}
4161 	}, {
4162 		.alg = "ecb(cipher_null)",
4163 		.test = alg_test_null,
4164 		.fips_allowed = 1,
4165 	}, {
4166 		.alg = "ecb(des)",
4167 		.test = alg_test_skcipher,
4168 		.suite = {
4169 			.cipher = __VECS(des_tv_template)
4170 		}
4171 	}, {
4172 		.alg = "ecb(des3_ede)",
4173 		.test = alg_test_skcipher,
4174 		.fips_allowed = 1,
4175 		.suite = {
4176 			.cipher = __VECS(des3_ede_tv_template)
4177 		}
4178 	}, {
4179 		.alg = "ecb(fcrypt)",
4180 		.test = alg_test_skcipher,
4181 		.suite = {
4182 			.cipher = {
4183 				.vecs = fcrypt_pcbc_tv_template,
4184 				.count = 1
4185 			}
4186 		}
4187 	}, {
4188 		.alg = "ecb(khazad)",
4189 		.test = alg_test_skcipher,
4190 		.suite = {
4191 			.cipher = __VECS(khazad_tv_template)
4192 		}
4193 	}, {
4194 		/* Same as ecb(aes) except the key is stored in
4195 		 * hardware secure memory which we reference by index
4196 		 */
4197 		.alg = "ecb(paes)",
4198 		.test = alg_test_null,
4199 		.fips_allowed = 1,
4200 	}, {
4201 		.alg = "ecb(seed)",
4202 		.test = alg_test_skcipher,
4203 		.suite = {
4204 			.cipher = __VECS(seed_tv_template)
4205 		}
4206 	}, {
4207 		.alg = "ecb(serpent)",
4208 		.test = alg_test_skcipher,
4209 		.suite = {
4210 			.cipher = __VECS(serpent_tv_template)
4211 		}
4212 	}, {
4213 		.alg = "ecb(sm4)",
4214 		.test = alg_test_skcipher,
4215 		.suite = {
4216 			.cipher = __VECS(sm4_tv_template)
4217 		}
4218 	}, {
4219 		.alg = "ecb(tea)",
4220 		.test = alg_test_skcipher,
4221 		.suite = {
4222 			.cipher = __VECS(tea_tv_template)
4223 		}
4224 	}, {
4225 		.alg = "ecb(tnepres)",
4226 		.test = alg_test_skcipher,
4227 		.suite = {
4228 			.cipher = __VECS(tnepres_tv_template)
4229 		}
4230 	}, {
4231 		.alg = "ecb(twofish)",
4232 		.test = alg_test_skcipher,
4233 		.suite = {
4234 			.cipher = __VECS(tf_tv_template)
4235 		}
4236 	}, {
4237 		.alg = "ecb(xeta)",
4238 		.test = alg_test_skcipher,
4239 		.suite = {
4240 			.cipher = __VECS(xeta_tv_template)
4241 		}
4242 	}, {
4243 		.alg = "ecb(xtea)",
4244 		.test = alg_test_skcipher,
4245 		.suite = {
4246 			.cipher = __VECS(xtea_tv_template)
4247 		}
4248 	}, {
4249 		.alg = "ecdh",
4250 		.test = alg_test_kpp,
4251 		.fips_allowed = 1,
4252 		.suite = {
4253 			.kpp = __VECS(ecdh_tv_template)
4254 		}
4255 	}, {
4256 		.alg = "ecrdsa",
4257 		.test = alg_test_akcipher,
4258 		.suite = {
4259 			.akcipher = __VECS(ecrdsa_tv_template)
4260 		}
4261 	}, {
4262 		.alg = "gcm(aes)",
4263 		.generic_driver = "gcm_base(ctr(aes-generic),ghash-generic)",
4264 		.test = alg_test_aead,
4265 		.fips_allowed = 1,
4266 		.suite = {
4267 			.aead = __VECS(aes_gcm_tv_template)
4268 		}
4269 	}, {
4270 		.alg = "ghash",
4271 		.test = alg_test_hash,
4272 		.fips_allowed = 1,
4273 		.suite = {
4274 			.hash = __VECS(ghash_tv_template)
4275 		}
4276 	}, {
4277 		.alg = "hmac(md5)",
4278 		.test = alg_test_hash,
4279 		.suite = {
4280 			.hash = __VECS(hmac_md5_tv_template)
4281 		}
4282 	}, {
4283 		.alg = "hmac(rmd128)",
4284 		.test = alg_test_hash,
4285 		.suite = {
4286 			.hash = __VECS(hmac_rmd128_tv_template)
4287 		}
4288 	}, {
4289 		.alg = "hmac(rmd160)",
4290 		.test = alg_test_hash,
4291 		.suite = {
4292 			.hash = __VECS(hmac_rmd160_tv_template)
4293 		}
4294 	}, {
4295 		.alg = "hmac(sha1)",
4296 		.test = alg_test_hash,
4297 		.fips_allowed = 1,
4298 		.suite = {
4299 			.hash = __VECS(hmac_sha1_tv_template)
4300 		}
4301 	}, {
4302 		.alg = "hmac(sha224)",
4303 		.test = alg_test_hash,
4304 		.fips_allowed = 1,
4305 		.suite = {
4306 			.hash = __VECS(hmac_sha224_tv_template)
4307 		}
4308 	}, {
4309 		.alg = "hmac(sha256)",
4310 		.test = alg_test_hash,
4311 		.fips_allowed = 1,
4312 		.suite = {
4313 			.hash = __VECS(hmac_sha256_tv_template)
4314 		}
4315 	}, {
4316 		.alg = "hmac(sha3-224)",
4317 		.test = alg_test_hash,
4318 		.fips_allowed = 1,
4319 		.suite = {
4320 			.hash = __VECS(hmac_sha3_224_tv_template)
4321 		}
4322 	}, {
4323 		.alg = "hmac(sha3-256)",
4324 		.test = alg_test_hash,
4325 		.fips_allowed = 1,
4326 		.suite = {
4327 			.hash = __VECS(hmac_sha3_256_tv_template)
4328 		}
4329 	}, {
4330 		.alg = "hmac(sha3-384)",
4331 		.test = alg_test_hash,
4332 		.fips_allowed = 1,
4333 		.suite = {
4334 			.hash = __VECS(hmac_sha3_384_tv_template)
4335 		}
4336 	}, {
4337 		.alg = "hmac(sha3-512)",
4338 		.test = alg_test_hash,
4339 		.fips_allowed = 1,
4340 		.suite = {
4341 			.hash = __VECS(hmac_sha3_512_tv_template)
4342 		}
4343 	}, {
4344 		.alg = "hmac(sha384)",
4345 		.test = alg_test_hash,
4346 		.fips_allowed = 1,
4347 		.suite = {
4348 			.hash = __VECS(hmac_sha384_tv_template)
4349 		}
4350 	}, {
4351 		.alg = "hmac(sha512)",
4352 		.test = alg_test_hash,
4353 		.fips_allowed = 1,
4354 		.suite = {
4355 			.hash = __VECS(hmac_sha512_tv_template)
4356 		}
4357 	}, {
4358 		.alg = "hmac(streebog256)",
4359 		.test = alg_test_hash,
4360 		.suite = {
4361 			.hash = __VECS(hmac_streebog256_tv_template)
4362 		}
4363 	}, {
4364 		.alg = "hmac(streebog512)",
4365 		.test = alg_test_hash,
4366 		.suite = {
4367 			.hash = __VECS(hmac_streebog512_tv_template)
4368 		}
4369 	}, {
4370 		.alg = "jitterentropy_rng",
4371 		.fips_allowed = 1,
4372 		.test = alg_test_null,
4373 	}, {
4374 		.alg = "kw(aes)",
4375 		.test = alg_test_skcipher,
4376 		.fips_allowed = 1,
4377 		.suite = {
4378 			.cipher = __VECS(aes_kw_tv_template)
4379 		}
4380 	}, {
4381 		.alg = "lrw(aes)",
4382 		.generic_driver = "lrw(ecb(aes-generic))",
4383 		.test = alg_test_skcipher,
4384 		.suite = {
4385 			.cipher = __VECS(aes_lrw_tv_template)
4386 		}
4387 	}, {
4388 		.alg = "lrw(camellia)",
4389 		.generic_driver = "lrw(ecb(camellia-generic))",
4390 		.test = alg_test_skcipher,
4391 		.suite = {
4392 			.cipher = __VECS(camellia_lrw_tv_template)
4393 		}
4394 	}, {
4395 		.alg = "lrw(cast6)",
4396 		.generic_driver = "lrw(ecb(cast6-generic))",
4397 		.test = alg_test_skcipher,
4398 		.suite = {
4399 			.cipher = __VECS(cast6_lrw_tv_template)
4400 		}
4401 	}, {
4402 		.alg = "lrw(serpent)",
4403 		.generic_driver = "lrw(ecb(serpent-generic))",
4404 		.test = alg_test_skcipher,
4405 		.suite = {
4406 			.cipher = __VECS(serpent_lrw_tv_template)
4407 		}
4408 	}, {
4409 		.alg = "lrw(twofish)",
4410 		.generic_driver = "lrw(ecb(twofish-generic))",
4411 		.test = alg_test_skcipher,
4412 		.suite = {
4413 			.cipher = __VECS(tf_lrw_tv_template)
4414 		}
4415 	}, {
4416 		.alg = "lz4",
4417 		.test = alg_test_comp,
4418 		.fips_allowed = 1,
4419 		.suite = {
4420 			.comp = {
4421 				.comp = __VECS(lz4_comp_tv_template),
4422 				.decomp = __VECS(lz4_decomp_tv_template)
4423 			}
4424 		}
4425 	}, {
4426 		.alg = "lz4hc",
4427 		.test = alg_test_comp,
4428 		.fips_allowed = 1,
4429 		.suite = {
4430 			.comp = {
4431 				.comp = __VECS(lz4hc_comp_tv_template),
4432 				.decomp = __VECS(lz4hc_decomp_tv_template)
4433 			}
4434 		}
4435 	}, {
4436 		.alg = "lzo",
4437 		.test = alg_test_comp,
4438 		.fips_allowed = 1,
4439 		.suite = {
4440 			.comp = {
4441 				.comp = __VECS(lzo_comp_tv_template),
4442 				.decomp = __VECS(lzo_decomp_tv_template)
4443 			}
4444 		}
4445 	}, {
4446 		.alg = "md4",
4447 		.test = alg_test_hash,
4448 		.suite = {
4449 			.hash = __VECS(md4_tv_template)
4450 		}
4451 	}, {
4452 		.alg = "md5",
4453 		.test = alg_test_hash,
4454 		.suite = {
4455 			.hash = __VECS(md5_tv_template)
4456 		}
4457 	}, {
4458 		.alg = "michael_mic",
4459 		.test = alg_test_hash,
4460 		.suite = {
4461 			.hash = __VECS(michael_mic_tv_template)
4462 		}
4463 	}, {
4464 		.alg = "morus1280",
4465 		.test = alg_test_aead,
4466 		.suite = {
4467 			.aead = __VECS(morus1280_tv_template)
4468 		}
4469 	}, {
4470 		.alg = "morus640",
4471 		.test = alg_test_aead,
4472 		.suite = {
4473 			.aead = __VECS(morus640_tv_template)
4474 		}
4475 	}, {
4476 		.alg = "nhpoly1305",
4477 		.test = alg_test_hash,
4478 		.suite = {
4479 			.hash = __VECS(nhpoly1305_tv_template)
4480 		}
4481 	}, {
4482 		.alg = "ofb(aes)",
4483 		.test = alg_test_skcipher,
4484 		.fips_allowed = 1,
4485 		.suite = {
4486 			.cipher = __VECS(aes_ofb_tv_template)
4487 		}
4488 	}, {
4489 		/* Same as ofb(aes) except the key is stored in
4490 		 * hardware secure memory which we reference by index
4491 		 */
4492 		.alg = "ofb(paes)",
4493 		.test = alg_test_null,
4494 		.fips_allowed = 1,
4495 	}, {
4496 		.alg = "pcbc(fcrypt)",
4497 		.test = alg_test_skcipher,
4498 		.suite = {
4499 			.cipher = __VECS(fcrypt_pcbc_tv_template)
4500 		}
4501 	}, {
4502 		.alg = "pkcs1pad(rsa,sha224)",
4503 		.test = alg_test_null,
4504 		.fips_allowed = 1,
4505 	}, {
4506 		.alg = "pkcs1pad(rsa,sha256)",
4507 		.test = alg_test_akcipher,
4508 		.fips_allowed = 1,
4509 		.suite = {
4510 			.akcipher = __VECS(pkcs1pad_rsa_tv_template)
4511 		}
4512 	}, {
4513 		.alg = "pkcs1pad(rsa,sha384)",
4514 		.test = alg_test_null,
4515 		.fips_allowed = 1,
4516 	}, {
4517 		.alg = "pkcs1pad(rsa,sha512)",
4518 		.test = alg_test_null,
4519 		.fips_allowed = 1,
4520 	}, {
4521 		.alg = "poly1305",
4522 		.test = alg_test_hash,
4523 		.suite = {
4524 			.hash = __VECS(poly1305_tv_template)
4525 		}
4526 	}, {
4527 		.alg = "rfc3686(ctr(aes))",
4528 		.test = alg_test_skcipher,
4529 		.fips_allowed = 1,
4530 		.suite = {
4531 			.cipher = __VECS(aes_ctr_rfc3686_tv_template)
4532 		}
4533 	}, {
4534 		.alg = "rfc4106(gcm(aes))",
4535 		.generic_driver = "rfc4106(gcm_base(ctr(aes-generic),ghash-generic))",
4536 		.test = alg_test_aead,
4537 		.fips_allowed = 1,
4538 		.suite = {
4539 			.aead = __VECS(aes_gcm_rfc4106_tv_template)
4540 		}
4541 	}, {
4542 		.alg = "rfc4309(ccm(aes))",
4543 		.generic_driver = "rfc4309(ccm_base(ctr(aes-generic),cbcmac(aes-generic)))",
4544 		.test = alg_test_aead,
4545 		.fips_allowed = 1,
4546 		.suite = {
4547 			.aead = __VECS(aes_ccm_rfc4309_tv_template)
4548 		}
4549 	}, {
4550 		.alg = "rfc4543(gcm(aes))",
4551 		.generic_driver = "rfc4543(gcm_base(ctr(aes-generic),ghash-generic))",
4552 		.test = alg_test_aead,
4553 		.suite = {
4554 			.aead = __VECS(aes_gcm_rfc4543_tv_template)
4555 		}
4556 	}, {
4557 		.alg = "rfc7539(chacha20,poly1305)",
4558 		.test = alg_test_aead,
4559 		.suite = {
4560 			.aead = __VECS(rfc7539_tv_template)
4561 		}
4562 	}, {
4563 		.alg = "rfc7539esp(chacha20,poly1305)",
4564 		.test = alg_test_aead,
4565 		.suite = {
4566 			.aead = __VECS(rfc7539esp_tv_template)
4567 		}
4568 	}, {
4569 		.alg = "rmd128",
4570 		.test = alg_test_hash,
4571 		.suite = {
4572 			.hash = __VECS(rmd128_tv_template)
4573 		}
4574 	}, {
4575 		.alg = "rmd160",
4576 		.test = alg_test_hash,
4577 		.suite = {
4578 			.hash = __VECS(rmd160_tv_template)
4579 		}
4580 	}, {
4581 		.alg = "rmd256",
4582 		.test = alg_test_hash,
4583 		.suite = {
4584 			.hash = __VECS(rmd256_tv_template)
4585 		}
4586 	}, {
4587 		.alg = "rmd320",
4588 		.test = alg_test_hash,
4589 		.suite = {
4590 			.hash = __VECS(rmd320_tv_template)
4591 		}
4592 	}, {
4593 		.alg = "rsa",
4594 		.test = alg_test_akcipher,
4595 		.fips_allowed = 1,
4596 		.suite = {
4597 			.akcipher = __VECS(rsa_tv_template)
4598 		}
4599 	}, {
4600 		.alg = "salsa20",
4601 		.test = alg_test_skcipher,
4602 		.suite = {
4603 			.cipher = __VECS(salsa20_stream_tv_template)
4604 		}
4605 	}, {
4606 		.alg = "sha1",
4607 		.test = alg_test_hash,
4608 		.fips_allowed = 1,
4609 		.suite = {
4610 			.hash = __VECS(sha1_tv_template)
4611 		}
4612 	}, {
4613 		.alg = "sha224",
4614 		.test = alg_test_hash,
4615 		.fips_allowed = 1,
4616 		.suite = {
4617 			.hash = __VECS(sha224_tv_template)
4618 		}
4619 	}, {
4620 		.alg = "sha256",
4621 		.test = alg_test_hash,
4622 		.fips_allowed = 1,
4623 		.suite = {
4624 			.hash = __VECS(sha256_tv_template)
4625 		}
4626 	}, {
4627 		.alg = "sha3-224",
4628 		.test = alg_test_hash,
4629 		.fips_allowed = 1,
4630 		.suite = {
4631 			.hash = __VECS(sha3_224_tv_template)
4632 		}
4633 	}, {
4634 		.alg = "sha3-256",
4635 		.test = alg_test_hash,
4636 		.fips_allowed = 1,
4637 		.suite = {
4638 			.hash = __VECS(sha3_256_tv_template)
4639 		}
4640 	}, {
4641 		.alg = "sha3-384",
4642 		.test = alg_test_hash,
4643 		.fips_allowed = 1,
4644 		.suite = {
4645 			.hash = __VECS(sha3_384_tv_template)
4646 		}
4647 	}, {
4648 		.alg = "sha3-512",
4649 		.test = alg_test_hash,
4650 		.fips_allowed = 1,
4651 		.suite = {
4652 			.hash = __VECS(sha3_512_tv_template)
4653 		}
4654 	}, {
4655 		.alg = "sha384",
4656 		.test = alg_test_hash,
4657 		.fips_allowed = 1,
4658 		.suite = {
4659 			.hash = __VECS(sha384_tv_template)
4660 		}
4661 	}, {
4662 		.alg = "sha512",
4663 		.test = alg_test_hash,
4664 		.fips_allowed = 1,
4665 		.suite = {
4666 			.hash = __VECS(sha512_tv_template)
4667 		}
4668 	}, {
4669 		.alg = "sm3",
4670 		.test = alg_test_hash,
4671 		.suite = {
4672 			.hash = __VECS(sm3_tv_template)
4673 		}
4674 	}, {
4675 		.alg = "streebog256",
4676 		.test = alg_test_hash,
4677 		.suite = {
4678 			.hash = __VECS(streebog256_tv_template)
4679 		}
4680 	}, {
4681 		.alg = "streebog512",
4682 		.test = alg_test_hash,
4683 		.suite = {
4684 			.hash = __VECS(streebog512_tv_template)
4685 		}
4686 	}, {
4687 		.alg = "tgr128",
4688 		.test = alg_test_hash,
4689 		.suite = {
4690 			.hash = __VECS(tgr128_tv_template)
4691 		}
4692 	}, {
4693 		.alg = "tgr160",
4694 		.test = alg_test_hash,
4695 		.suite = {
4696 			.hash = __VECS(tgr160_tv_template)
4697 		}
4698 	}, {
4699 		.alg = "tgr192",
4700 		.test = alg_test_hash,
4701 		.suite = {
4702 			.hash = __VECS(tgr192_tv_template)
4703 		}
4704 	}, {
4705 		.alg = "vmac64(aes)",
4706 		.test = alg_test_hash,
4707 		.suite = {
4708 			.hash = __VECS(vmac64_aes_tv_template)
4709 		}
4710 	}, {
4711 		.alg = "wp256",
4712 		.test = alg_test_hash,
4713 		.suite = {
4714 			.hash = __VECS(wp256_tv_template)
4715 		}
4716 	}, {
4717 		.alg = "wp384",
4718 		.test = alg_test_hash,
4719 		.suite = {
4720 			.hash = __VECS(wp384_tv_template)
4721 		}
4722 	}, {
4723 		.alg = "wp512",
4724 		.test = alg_test_hash,
4725 		.suite = {
4726 			.hash = __VECS(wp512_tv_template)
4727 		}
4728 	}, {
4729 		.alg = "xcbc(aes)",
4730 		.test = alg_test_hash,
4731 		.suite = {
4732 			.hash = __VECS(aes_xcbc128_tv_template)
4733 		}
4734 	}, {
4735 		.alg = "xchacha12",
4736 		.test = alg_test_skcipher,
4737 		.suite = {
4738 			.cipher = __VECS(xchacha12_tv_template)
4739 		},
4740 	}, {
4741 		.alg = "xchacha20",
4742 		.test = alg_test_skcipher,
4743 		.suite = {
4744 			.cipher = __VECS(xchacha20_tv_template)
4745 		},
4746 	}, {
4747 		.alg = "xts(aes)",
4748 		.generic_driver = "xts(ecb(aes-generic))",
4749 		.test = alg_test_skcipher,
4750 		.fips_allowed = 1,
4751 		.suite = {
4752 			.cipher = __VECS(aes_xts_tv_template)
4753 		}
4754 	}, {
4755 		.alg = "xts(camellia)",
4756 		.generic_driver = "xts(ecb(camellia-generic))",
4757 		.test = alg_test_skcipher,
4758 		.suite = {
4759 			.cipher = __VECS(camellia_xts_tv_template)
4760 		}
4761 	}, {
4762 		.alg = "xts(cast6)",
4763 		.generic_driver = "xts(ecb(cast6-generic))",
4764 		.test = alg_test_skcipher,
4765 		.suite = {
4766 			.cipher = __VECS(cast6_xts_tv_template)
4767 		}
4768 	}, {
4769 		/* Same as xts(aes) except the key is stored in
4770 		 * hardware secure memory which we reference by index
4771 		 */
4772 		.alg = "xts(paes)",
4773 		.test = alg_test_null,
4774 		.fips_allowed = 1,
4775 	}, {
4776 		.alg = "xts(serpent)",
4777 		.generic_driver = "xts(ecb(serpent-generic))",
4778 		.test = alg_test_skcipher,
4779 		.suite = {
4780 			.cipher = __VECS(serpent_xts_tv_template)
4781 		}
4782 	}, {
4783 		.alg = "xts(twofish)",
4784 		.generic_driver = "xts(ecb(twofish-generic))",
4785 		.test = alg_test_skcipher,
4786 		.suite = {
4787 			.cipher = __VECS(tf_xts_tv_template)
4788 		}
4789 	}, {
4790 		.alg = "xts4096(paes)",
4791 		.test = alg_test_null,
4792 		.fips_allowed = 1,
4793 	}, {
4794 		.alg = "xts512(paes)",
4795 		.test = alg_test_null,
4796 		.fips_allowed = 1,
4797 	}, {
4798 		.alg = "zlib-deflate",
4799 		.test = alg_test_comp,
4800 		.fips_allowed = 1,
4801 		.suite = {
4802 			.comp = {
4803 				.comp = __VECS(zlib_deflate_comp_tv_template),
4804 				.decomp = __VECS(zlib_deflate_decomp_tv_template)
4805 			}
4806 		}
4807 	}, {
4808 		.alg = "zstd",
4809 		.test = alg_test_comp,
4810 		.fips_allowed = 1,
4811 		.suite = {
4812 			.comp = {
4813 				.comp = __VECS(zstd_comp_tv_template),
4814 				.decomp = __VECS(zstd_decomp_tv_template)
4815 			}
4816 		}
4817 	}
4818 };
4819 
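/*
 * The descriptor table must stay strictly sorted by algorithm name so that
 * alg_find_test() can binary-search it; warn about misordered or duplicate
 * entries at init time.
 */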
4820 static void alg_check_test_descs_order(void)
4821 {
4822 	int i;
4823 
4824 	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
4825 		int diff = strcmp(alg_test_descs[i - 1].alg,
4826 				  alg_test_descs[i].alg);
4827 
4828 		if (WARN_ON(diff > 0)) {
4829 			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
4830 				alg_test_descs[i - 1].alg,
4831 				alg_test_descs[i].alg);
4832 		}
4833 
4834 		if (WARN_ON(diff == 0)) {
4835 			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
4836 				alg_test_descs[i].alg);
4837 		}
4838 	}
4839 }
4840 
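/* Sanity-check the built-in testvec configs used by the cipher and hash tests. */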
4841 static void alg_check_testvec_configs(void)
4842 {
4843 	int i;
4844 
4845 	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
4846 		WARN_ON(!valid_testvec_config(
4847 				&default_cipher_testvec_configs[i]));
4848 
4849 	for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++)
4850 		WARN_ON(!valid_testvec_config(
4851 				&default_hash_testvec_configs[i]));
4852 }
4853 
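/* Runs once, on the first alg_test() call (via DO_ONCE). */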
4854 static void testmgr_onetime_init(void)
4855 {
4856 	alg_check_test_descs_order();
4857 	alg_check_testvec_configs();
4858 
4859 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
4860 	pr_warn("alg: extra crypto tests enabled.  This is intended for developer use only.\n");
4861 #endif
4862 }
4863 
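/*
 * Binary-search the sorted alg_test_descs[] table for @alg; returns the
 * index of the matching entry, or -1 if there is none.
 */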
4864 static int alg_find_test(const char *alg)
4865 {
4866 	int start = 0;
4867 	int end = ARRAY_SIZE(alg_test_descs);
4868 
4869 	while (start < end) {
4870 		int i = (start + end) / 2;
4871 		int diff = strcmp(alg_test_descs[i].alg, alg);
4872 
4873 		if (diff > 0) {
4874 			end = i;
4875 			continue;
4876 		}
4877 
4878 		if (diff < 0) {
4879 			start = i + 1;
4880 			continue;
4881 		}
4882 
4883 		return i;
4884 	}
4885 
4886 	return -1;
4887 }
4888 
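/*
 * alg_test() - run the self-tests for an algorithm instance
 * @driver: driver (implementation) name, e.g. cra_driver_name
 * @alg: algorithm name, e.g. cra_name
 * @type: algorithm type flags
 * @mask: algorithm type mask
 *
 * Called by the crypto manager when an algorithm is registered.
 *
 * Illustrative call only (the names below are hypothetical, not taken from
 * this file):
 *
 *	err = alg_test("cbc(aes-generic)", "cbc(aes)", type, mask);
 *
 * Returns 0 on success or if no tests are defined, and a negative errno on
 * failure; in FIPS mode (or with panic_on_fail set) a failure panics instead
 * of returning.
 */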
4889 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
4890 {
4891 	int i;
4892 	int j;
4893 	int rc;
4894 
4895 	if (!fips_enabled && notests) {
4896 		printk_once(KERN_INFO "alg: self-tests disabled\n");
4897 		return 0;
4898 	}
4899 
4900 	DO_ONCE(testmgr_onetime_init);
4901 
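	/*
	 * Bare single-block ciphers have no descriptor entries of their own:
	 * wrap the name in "ecb(...)" and run that entry's vectors instead.
	 */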
4902 	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
4903 		char nalg[CRYPTO_MAX_ALG_NAME];
4904 
4905 		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
4906 		    sizeof(nalg))
4907 			return -ENAMETOOLONG;
4908 
4909 		i = alg_find_test(nalg);
4910 		if (i < 0)
4911 			goto notest;
4912 
4913 		if (fips_enabled && !alg_test_descs[i].fips_allowed)
4914 			goto non_fips_alg;
4915 
4916 		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
4917 		goto test_done;
4918 	}
4919 
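	/*
	 * A driver-specific descriptor entry (keyed by driver name) is run
	 * in addition to the generic one keyed by algorithm name.
	 */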
4920 	i = alg_find_test(alg);
4921 	j = alg_find_test(driver);
4922 	if (i < 0 && j < 0)
4923 		goto notest;
4924 
4925 	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
4926 			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
4927 		goto non_fips_alg;
4928 
4929 	rc = 0;
4930 	if (i >= 0)
4931 		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
4932 					     type, mask);
4933 	if (j >= 0 && j != i)
4934 		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
4935 					     type, mask);
4936 
4937 test_done:
4938 	if (rc && (fips_enabled || panic_on_fail))
4939 		panic("alg: self-tests for %s (%s) failed in %s mode!\n",
4940 		      driver, alg, fips_enabled ? "fips" : "panic_on_fail");
4941 
4942 	if (fips_enabled && !rc)
4943 		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
4944 
4945 	return rc;
4946 
4947 notest:
4948 	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
4949 	return 0;
4950 non_fips_alg:
4951 	return -EINVAL;
4952 }
4953 
4954 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
4955 
4956 EXPORT_SYMBOL_GPL(alg_test);
4957