xref: /openbmc/linux/crypto/testmgr.c (revision 4c1ca831)
1  // SPDX-License-Identifier: GPL-2.0-or-later
2  /*
3   * Algorithm testing framework and tests.
4   *
5   * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
6   * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
7   * Copyright (c) 2007 Nokia Siemens Networks
8   * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9   * Copyright (c) 2019 Google LLC
10   *
11   * Updated RFC4106 AES-GCM testing.
12   *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
13   *             Adrian Hoban <adrian.hoban@intel.com>
14   *             Gabriele Paoloni <gabriele.paoloni@intel.com>
15   *             Tadeusz Struk (tadeusz.struk@intel.com)
16   *    Copyright (c) 2010, Intel Corporation.
17   */
18  
19  #include <crypto/aead.h>
20  #include <crypto/hash.h>
21  #include <crypto/skcipher.h>
22  #include <linux/err.h>
23  #include <linux/fips.h>
24  #include <linux/module.h>
25  #include <linux/once.h>
26  #include <linux/random.h>
27  #include <linux/scatterlist.h>
28  #include <linux/slab.h>
29  #include <linux/string.h>
30  #include <linux/uio.h>
31  #include <crypto/rng.h>
32  #include <crypto/drbg.h>
33  #include <crypto/akcipher.h>
34  #include <crypto/kpp.h>
35  #include <crypto/acompress.h>
36  #include <crypto/internal/simd.h>
37  
38  #include "internal.h"
39  
40  static bool notests;
41  module_param(notests, bool, 0644);
42  MODULE_PARM_DESC(notests, "disable crypto self-tests");
43  
44  static bool panic_on_fail;
45  module_param(panic_on_fail, bool, 0444);
46  
47  #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
48  static bool noextratests;
49  module_param(noextratests, bool, 0644);
50  MODULE_PARM_DESC(noextratests, "disable expensive crypto self-tests");
51  
52  static unsigned int fuzz_iterations = 100;
53  module_param(fuzz_iterations, uint, 0644);
54  MODULE_PARM_DESC(fuzz_iterations, "number of fuzz test iterations");
55  
56  DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
57  EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
58  #endif
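
/*
 * Illustrative sketch (not part of the test logic itself): the per-CPU flag
 * above is consumed by crypto_simd_usable() in <crypto/internal/simd.h>, so
 * SIMD-accelerated drivers that gate their fast paths on it, roughly like
 *
 *	if (crypto_simd_usable()) {
 *		kernel_fpu_begin();	/* x86, for example */
 *		...vectorized implementation...
 *		kernel_fpu_end();
 *	} else {
 *		...portable fallback...
 *	}
 *
 * fall back to the portable code path whenever the extra self-tests set
 * crypto_simd_disabled_for_test.
 */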
59  
60  #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
61  
62  /* a perfect nop */
63  int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
64  {
65  	return 0;
66  }
67  
68  #else
69  
70  #include "testmgr.h"
71  
72  /*
73   * Number of buffers needed for testing; each buffer is one or more pages
      from the page allocator.
74   */
75  #define XBUFSIZE	8
76  
77  /*
78   * Used by test_cipher()
79   */
80  #define ENCRYPT 1
81  #define DECRYPT 0
82  
83  struct aead_test_suite {
84  	const struct aead_testvec *vecs;
85  	unsigned int count;
86  
87  	/*
88  	 * Set if trying to decrypt an inauthentic ciphertext with this
89  	 * algorithm might result in EINVAL rather than EBADMSG, due to other
90  	 * validation the algorithm does on the inputs such as length checks.
91  	 */
92  	unsigned int einval_allowed : 1;
93  
94  	/*
95  	 * Set if this algorithm requires that the IV be located at the end of
96  	 * the AAD buffer, in addition to being given in the normal way.  The
97  	 * behavior when the two IV copies differ is implementation-defined.
98  	 */
99  	unsigned int aad_iv : 1;
100  };
101  
102  struct cipher_test_suite {
103  	const struct cipher_testvec *vecs;
104  	unsigned int count;
105  };
106  
107  struct comp_test_suite {
108  	struct {
109  		const struct comp_testvec *vecs;
110  		unsigned int count;
111  	} comp, decomp;
112  };
113  
114  struct hash_test_suite {
115  	const struct hash_testvec *vecs;
116  	unsigned int count;
117  };
118  
119  struct cprng_test_suite {
120  	const struct cprng_testvec *vecs;
121  	unsigned int count;
122  };
123  
124  struct drbg_test_suite {
125  	const struct drbg_testvec *vecs;
126  	unsigned int count;
127  };
128  
129  struct akcipher_test_suite {
130  	const struct akcipher_testvec *vecs;
131  	unsigned int count;
132  };
133  
134  struct kpp_test_suite {
135  	const struct kpp_testvec *vecs;
136  	unsigned int count;
137  };
138  
139  struct alg_test_desc {
140  	const char *alg;
141  	const char *generic_driver;
142  	int (*test)(const struct alg_test_desc *desc, const char *driver,
143  		    u32 type, u32 mask);
144  	int fips_allowed;	/* set if alg is allowed in fips mode */
145  
146  	union {
147  		struct aead_test_suite aead;
148  		struct cipher_test_suite cipher;
149  		struct comp_test_suite comp;
150  		struct hash_test_suite hash;
151  		struct cprng_test_suite cprng;
152  		struct drbg_test_suite drbg;
153  		struct akcipher_test_suite akcipher;
154  		struct kpp_test_suite kpp;
155  	} suite;
156  };
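
/*
 * As a rough illustration (the authoritative entries live in the
 * alg_test_descs[] table further down this file), a hash algorithm is
 * typically described by something along the lines of:
 *
 *	{
 *		.alg = "sha256",
 *		.test = alg_test_hash,
 *		.fips_allowed = 1,
 *		.suite = {
 *			.hash = __VECS(sha256_tv_template)
 *		}
 *	}
 *
 * where __VECS() expands to a { vecs, count } pair referencing a test vector
 * array from testmgr.h.
 */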
157  
158  static void hexdump(unsigned char *buf, unsigned int len)
159  {
160  	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
161  			16, 1,
162  			buf, len, false);
163  }
164  
165  static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
166  {
167  	int i;
168  
169  	for (i = 0; i < XBUFSIZE; i++) {
170  		buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);
171  		if (!buf[i])
172  			goto err_free_buf;
173  	}
174  
175  	return 0;
176  
177  err_free_buf:
178  	while (i-- > 0)
179  		free_pages((unsigned long)buf[i], order);
180  
181  	return -ENOMEM;
182  }
183  
184  static int testmgr_alloc_buf(char *buf[XBUFSIZE])
185  {
186  	return __testmgr_alloc_buf(buf, 0);
187  }
188  
189  static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
190  {
191  	int i;
192  
193  	for (i = 0; i < XBUFSIZE; i++)
194  		free_pages((unsigned long)buf[i], order);
195  }
196  
197  static void testmgr_free_buf(char *buf[XBUFSIZE])
198  {
199  	__testmgr_free_buf(buf, 0);
200  }
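
/*
 * Minimal sketch of the intended allocate/use/free pattern for these page
 * buffers (the caller below is hypothetical; the real test routines later in
 * this file follow the same shape):
 */
static int __maybe_unused testmgr_buf_usage_example(void)
{
	char *xbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))
		return -ENOMEM;

	/* ... copy test vector data into xbuf[] and run the operation ... */

	testmgr_free_buf(xbuf);
	return 0;
}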
201  
202  #define TESTMGR_POISON_BYTE	0xfe
203  #define TESTMGR_POISON_LEN	16
204  
205  static inline void testmgr_poison(void *addr, size_t len)
206  {
207  	memset(addr, TESTMGR_POISON_BYTE, len);
208  }
209  
210  /* Is the memory region still fully poisoned? */
211  static inline bool testmgr_is_poison(const void *addr, size_t len)
212  {
213  	return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
214  }
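
/*
 * For example, the hash tests below poison the bytes just past the expected
 * digest and then verify they are still intact, to catch buffer overruns:
 *
 *	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);
 *	...run the hash into result...
 *	if (!testmgr_is_poison(&result[digestsize], TESTMGR_POISON_LEN))
 *		return -EOVERFLOW;
 */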
215  
216  /* flush type for hash algorithms */
217  enum flush_type {
218  	/* merge with update of previous buffer(s) */
219  	FLUSH_TYPE_NONE = 0,
220  
221  	/* update with previous buffer(s) before doing this one */
222  	FLUSH_TYPE_FLUSH,
223  
224  	/* likewise, but also export and re-import the intermediate state */
225  	FLUSH_TYPE_REIMPORT,
226  };
227  
228  /* finalization function for hash algorithms */
229  enum finalization_type {
230  	FINALIZATION_TYPE_FINAL,	/* use final() */
231  	FINALIZATION_TYPE_FINUP,	/* use finup() */
232  	FINALIZATION_TYPE_DIGEST,	/* use digest() */
233  };
234  
235  #define TEST_SG_TOTAL	10000
236  
237  /**
238   * struct test_sg_division - description of a scatterlist entry
239   *
240   * This struct describes one entry of a scatterlist being constructed to check a
241   * crypto test vector.
242   *
243   * @proportion_of_total: length of this chunk relative to the total length,
244   *			 given as a proportion out of TEST_SG_TOTAL so that it
245   *			 scales to fit any test vector
246   * @offset: byte offset into a 2-page buffer at which this chunk will start
247   * @offset_relative_to_alignmask: if true, add the algorithm's alignmask to the
248   *				  @offset
249   * @flush_type: for hashes, whether an update() should be done now vs.
250   *		continuing to accumulate data
251   * @nosimd: if doing the pending update(), do it with SIMD disabled?
252   */
253  struct test_sg_division {
254  	unsigned int proportion_of_total;
255  	unsigned int offset;
256  	bool offset_relative_to_alignmask;
257  	enum flush_type flush_type;
258  	bool nosimd;
259  };
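
/*
 * Worked example: with TEST_SG_TOTAL == 10000, a division with
 * .proportion_of_total = 2500 covers one quarter of the message.  For a
 * 100-byte test vector, build_test_sglist() below sizes that entry as
 * (100 * 2500 + 5000) / 10000 = 25 bytes, and any rounding remainder is
 * folded into the final scatterlist entry.
 */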
260  
261  /**
262   * struct testvec_config - configuration for testing a crypto test vector
263   *
264   * This struct describes the data layout and other parameters with which each
265   * crypto test vector can be tested.
266   *
267   * @name: name of this config, logged for debugging purposes if a test fails
268   * @inplace: operate on the data in-place, if applicable for the algorithm type?
269   * @req_flags: extra request_flags, e.g. CRYPTO_TFM_REQ_MAY_SLEEP
270   * @src_divs: description of how to arrange the source scatterlist
271   * @dst_divs: description of how to arrange the dst scatterlist, if applicable
272   *	      for the algorithm type.  Defaults to @src_divs if unset.
273   * @iv_offset: misalignment of the IV in the range [0..MAX_ALGAPI_ALIGNMASK+1],
274   *	       where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
275   * @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
276   *				     the @iv_offset
277   * @key_offset: misalignment of the key, where 0 is default alignment
278   * @key_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
279   *				      the @key_offset
280   * @finalization_type: what finalization function to use for hashes
281   * @nosimd: execute with SIMD disabled?  Requires !CRYPTO_TFM_REQ_MAY_SLEEP.
282   */
283  struct testvec_config {
284  	const char *name;
285  	bool inplace;
286  	u32 req_flags;
287  	struct test_sg_division src_divs[XBUFSIZE];
288  	struct test_sg_division dst_divs[XBUFSIZE];
289  	unsigned int iv_offset;
290  	unsigned int key_offset;
291  	bool iv_offset_relative_to_alignmask;
292  	bool key_offset_relative_to_alignmask;
293  	enum finalization_type finalization_type;
294  	bool nosimd;
295  };
296  
297  #define TESTVEC_CONFIG_NAMELEN	192
298  
299  /*
300   * The following are the lists of testvec_configs to test for each algorithm
301   * type when the basic crypto self-tests are enabled, i.e. when
302   * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is unset.  They aim to provide good test
303   * coverage, while keeping the test time much shorter than the full fuzz tests
304   * so that the basic tests can be enabled in a wider range of circumstances.
305   */
306  
307  /* Configs for skciphers and aeads */
308  static const struct testvec_config default_cipher_testvec_configs[] = {
309  	{
310  		.name = "in-place",
311  		.inplace = true,
312  		.src_divs = { { .proportion_of_total = 10000 } },
313  	}, {
314  		.name = "out-of-place",
315  		.src_divs = { { .proportion_of_total = 10000 } },
316  	}, {
317  		.name = "unaligned buffer, offset=1",
318  		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
319  		.iv_offset = 1,
320  		.key_offset = 1,
321  	}, {
322  		.name = "buffer aligned only to alignmask",
323  		.src_divs = {
324  			{
325  				.proportion_of_total = 10000,
326  				.offset = 1,
327  				.offset_relative_to_alignmask = true,
328  			},
329  		},
330  		.iv_offset = 1,
331  		.iv_offset_relative_to_alignmask = true,
332  		.key_offset = 1,
333  		.key_offset_relative_to_alignmask = true,
334  	}, {
335  		.name = "two even aligned splits",
336  		.src_divs = {
337  			{ .proportion_of_total = 5000 },
338  			{ .proportion_of_total = 5000 },
339  		},
340  	}, {
341  		.name = "uneven misaligned splits, may sleep",
342  		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
343  		.src_divs = {
344  			{ .proportion_of_total = 1900, .offset = 33 },
345  			{ .proportion_of_total = 3300, .offset = 7  },
346  			{ .proportion_of_total = 4800, .offset = 18 },
347  		},
348  		.iv_offset = 3,
349  		.key_offset = 3,
350  	}, {
351  		.name = "misaligned splits crossing pages, inplace",
352  		.inplace = true,
353  		.src_divs = {
354  			{
355  				.proportion_of_total = 7500,
356  				.offset = PAGE_SIZE - 32
357  			}, {
358  				.proportion_of_total = 2500,
359  				.offset = PAGE_SIZE - 7
360  			},
361  		},
362  	}
363  };
364  
365  static const struct testvec_config default_hash_testvec_configs[] = {
366  	{
367  		.name = "init+update+final aligned buffer",
368  		.src_divs = { { .proportion_of_total = 10000 } },
369  		.finalization_type = FINALIZATION_TYPE_FINAL,
370  	}, {
371  		.name = "init+finup aligned buffer",
372  		.src_divs = { { .proportion_of_total = 10000 } },
373  		.finalization_type = FINALIZATION_TYPE_FINUP,
374  	}, {
375  		.name = "digest aligned buffer",
376  		.src_divs = { { .proportion_of_total = 10000 } },
377  		.finalization_type = FINALIZATION_TYPE_DIGEST,
378  	}, {
379  		.name = "init+update+final misaligned buffer",
380  		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
381  		.finalization_type = FINALIZATION_TYPE_FINAL,
382  		.key_offset = 1,
383  	}, {
384  		.name = "digest buffer aligned only to alignmask",
385  		.src_divs = {
386  			{
387  				.proportion_of_total = 10000,
388  				.offset = 1,
389  				.offset_relative_to_alignmask = true,
390  			},
391  		},
392  		.finalization_type = FINALIZATION_TYPE_DIGEST,
393  		.key_offset = 1,
394  		.key_offset_relative_to_alignmask = true,
395  	}, {
396  		.name = "init+update+update+final two even splits",
397  		.src_divs = {
398  			{ .proportion_of_total = 5000 },
399  			{
400  				.proportion_of_total = 5000,
401  				.flush_type = FLUSH_TYPE_FLUSH,
402  			},
403  		},
404  		.finalization_type = FINALIZATION_TYPE_FINAL,
405  	}, {
406  		.name = "digest uneven misaligned splits, may sleep",
407  		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
408  		.src_divs = {
409  			{ .proportion_of_total = 1900, .offset = 33 },
410  			{ .proportion_of_total = 3300, .offset = 7  },
411  			{ .proportion_of_total = 4800, .offset = 18 },
412  		},
413  		.finalization_type = FINALIZATION_TYPE_DIGEST,
414  	}, {
415  		.name = "digest misaligned splits crossing pages",
416  		.src_divs = {
417  			{
418  				.proportion_of_total = 7500,
419  				.offset = PAGE_SIZE - 32,
420  			}, {
421  				.proportion_of_total = 2500,
422  				.offset = PAGE_SIZE - 7,
423  			},
424  		},
425  		.finalization_type = FINALIZATION_TYPE_DIGEST,
426  	}, {
427  		.name = "import/export",
428  		.src_divs = {
429  			{
430  				.proportion_of_total = 6500,
431  				.flush_type = FLUSH_TYPE_REIMPORT,
432  			}, {
433  				.proportion_of_total = 3500,
434  				.flush_type = FLUSH_TYPE_REIMPORT,
435  			},
436  		},
437  		.finalization_type = FINALIZATION_TYPE_FINAL,
438  	}
439  };
440  
441  static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
442  {
443  	unsigned int remaining = TEST_SG_TOTAL;
444  	unsigned int ndivs = 0;
445  
446  	do {
447  		remaining -= divs[ndivs++].proportion_of_total;
448  	} while (remaining);
449  
450  	return ndivs;
451  }
452  
453  #define SGDIVS_HAVE_FLUSHES	BIT(0)
454  #define SGDIVS_HAVE_NOSIMD	BIT(1)
455  
456  static bool valid_sg_divisions(const struct test_sg_division *divs,
457  			       unsigned int count, int *flags_ret)
458  {
459  	unsigned int total = 0;
460  	unsigned int i;
461  
462  	for (i = 0; i < count && total != TEST_SG_TOTAL; i++) {
463  		if (divs[i].proportion_of_total <= 0 ||
464  		    divs[i].proportion_of_total > TEST_SG_TOTAL - total)
465  			return false;
466  		total += divs[i].proportion_of_total;
467  		if (divs[i].flush_type != FLUSH_TYPE_NONE)
468  			*flags_ret |= SGDIVS_HAVE_FLUSHES;
469  		if (divs[i].nosimd)
470  			*flags_ret |= SGDIVS_HAVE_NOSIMD;
471  	}
472  	return total == TEST_SG_TOTAL &&
473  		memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
474  }
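
/*
 * For example, src_divs = { {5000}, {5000} } is accepted because the
 * proportions sum to exactly TEST_SG_TOTAL, while { {6000}, {5000} } is
 * rejected since the second division would overshoot the total; any array
 * entries after the total has been reached must be all-zero.
 */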
475  
476  /*
477   * Check whether the given testvec_config is valid.  This isn't strictly needed
478   * since every testvec_config should be valid, but check anyway so that people
479   * don't unknowingly add broken configs that don't do what they wanted.
480   */
481  static bool valid_testvec_config(const struct testvec_config *cfg)
482  {
483  	int flags = 0;
484  
485  	if (cfg->name == NULL)
486  		return false;
487  
488  	if (!valid_sg_divisions(cfg->src_divs, ARRAY_SIZE(cfg->src_divs),
489  				&flags))
490  		return false;
491  
492  	if (cfg->dst_divs[0].proportion_of_total) {
493  		if (!valid_sg_divisions(cfg->dst_divs,
494  					ARRAY_SIZE(cfg->dst_divs), &flags))
495  			return false;
496  	} else {
497  		if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
498  			return false;
499  		/* defaults to dst_divs=src_divs */
500  	}
501  
502  	if (cfg->iv_offset +
503  	    (cfg->iv_offset_relative_to_alignmask ? MAX_ALGAPI_ALIGNMASK : 0) >
504  	    MAX_ALGAPI_ALIGNMASK + 1)
505  		return false;
506  
507  	if ((flags & (SGDIVS_HAVE_FLUSHES | SGDIVS_HAVE_NOSIMD)) &&
508  	    cfg->finalization_type == FINALIZATION_TYPE_DIGEST)
509  		return false;
510  
511  	if ((cfg->nosimd || (flags & SGDIVS_HAVE_NOSIMD)) &&
512  	    (cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP))
513  		return false;
514  
515  	return true;
516  }
517  
518  struct test_sglist {
519  	char *bufs[XBUFSIZE];
520  	struct scatterlist sgl[XBUFSIZE];
521  	struct scatterlist sgl_saved[XBUFSIZE];
522  	struct scatterlist *sgl_ptr;
523  	unsigned int nents;
524  };
525  
526  static int init_test_sglist(struct test_sglist *tsgl)
527  {
528  	return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);
529  }
530  
531  static void destroy_test_sglist(struct test_sglist *tsgl)
532  {
533  	return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);
534  }
535  
536  /**
537   * build_test_sglist() - build a scatterlist for a crypto test
538   *
539   * @tsgl: the scatterlist to build.  @tsgl->bufs[] contains an array of 2-page
540   *	  buffers which the scatterlist @tsgl->sgl[] will be made to point into.
541   * @divs: the layout specification on which the scatterlist will be based
542   * @alignmask: the algorithm's alignmask
543   * @total_len: the total length of the scatterlist to build in bytes
544   * @data: if non-NULL, the buffers will be filled with this data until it ends.
545   *	  Otherwise the buffers will be poisoned.  In both cases, some bytes
546   *	  past the end of each buffer will be poisoned to help detect overruns.
547   * @out_divs: if non-NULL, the test_sg_division to which each scatterlist entry
548   *	      corresponds will be returned here.  This will match @divs except
549   *	      that divisions resolving to a length of 0 are omitted as they are
550   *	      not included in the scatterlist.
551   *
552   * Return: 0 or a -errno value
553   */
554  static int build_test_sglist(struct test_sglist *tsgl,
555  			     const struct test_sg_division *divs,
556  			     const unsigned int alignmask,
557  			     const unsigned int total_len,
558  			     struct iov_iter *data,
559  			     const struct test_sg_division *out_divs[XBUFSIZE])
560  {
561  	struct {
562  		const struct test_sg_division *div;
563  		size_t length;
564  	} partitions[XBUFSIZE];
565  	const unsigned int ndivs = count_test_sg_divisions(divs);
566  	unsigned int len_remaining = total_len;
567  	unsigned int i;
568  
569  	BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
570  	if (WARN_ON(ndivs > ARRAY_SIZE(partitions)))
571  		return -EINVAL;
572  
573  	/* Calculate the (div, length) pairs */
574  	tsgl->nents = 0;
575  	for (i = 0; i < ndivs; i++) {
576  		unsigned int len_this_sg =
577  			min(len_remaining,
578  			    (total_len * divs[i].proportion_of_total +
579  			     TEST_SG_TOTAL / 2) / TEST_SG_TOTAL);
580  
581  		if (len_this_sg != 0) {
582  			partitions[tsgl->nents].div = &divs[i];
583  			partitions[tsgl->nents].length = len_this_sg;
584  			tsgl->nents++;
585  			len_remaining -= len_this_sg;
586  		}
587  	}
588  	if (tsgl->nents == 0) {
589  		partitions[tsgl->nents].div = &divs[0];
590  		partitions[tsgl->nents].length = 0;
591  		tsgl->nents++;
592  	}
593  	partitions[tsgl->nents - 1].length += len_remaining;
594  
595  	/* Set up the sgl entries and fill the data or poison */
596  	sg_init_table(tsgl->sgl, tsgl->nents);
597  	for (i = 0; i < tsgl->nents; i++) {
598  		unsigned int offset = partitions[i].div->offset;
599  		void *addr;
600  
601  		if (partitions[i].div->offset_relative_to_alignmask)
602  			offset += alignmask;
603  
604  		while (offset + partitions[i].length + TESTMGR_POISON_LEN >
605  		       2 * PAGE_SIZE) {
606  			if (WARN_ON(offset <= 0))
607  				return -EINVAL;
608  			offset /= 2;
609  		}
610  
611  		addr = &tsgl->bufs[i][offset];
612  		sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);
613  
614  		if (out_divs)
615  			out_divs[i] = partitions[i].div;
616  
617  		if (data) {
618  			size_t copy_len, copied;
619  
620  			copy_len = min(partitions[i].length, data->count);
621  			copied = copy_from_iter(addr, copy_len, data);
622  			if (WARN_ON(copied != copy_len))
623  				return -EINVAL;
624  			testmgr_poison(addr + copy_len, partitions[i].length +
625  				       TESTMGR_POISON_LEN - copy_len);
626  		} else {
627  			testmgr_poison(addr, partitions[i].length +
628  				       TESTMGR_POISON_LEN);
629  		}
630  	}
631  
632  	sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
633  	tsgl->sgl_ptr = tsgl->sgl;
634  	memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
635  	return 0;
636  }
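
/*
 * Worked example: for total_len = 100 and two divisions of
 * .proportion_of_total = 5000 each, the partitioning above yields two
 * 50-byte scatterlist entries; with proportions such as 3333/3333/3334 the
 * rounding shortfall is simply added onto the last entry.
 */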
637  
638  /*
639   * Verify that a scatterlist crypto operation produced the correct output.
640   *
641   * @tsgl: scatterlist containing the actual output
642   * @expected_output: buffer containing the expected output
643   * @len_to_check: length of @expected_output in bytes
644   * @unchecked_prefix_len: number of ignored bytes in @tsgl prior to real result
645   * @check_poison: verify that the poison bytes after each chunk are intact?
646   *
647   * Return: 0 if correct, -EINVAL if incorrect, -EOVERFLOW if buffer overrun.
648   */
649  static int verify_correct_output(const struct test_sglist *tsgl,
650  				 const char *expected_output,
651  				 unsigned int len_to_check,
652  				 unsigned int unchecked_prefix_len,
653  				 bool check_poison)
654  {
655  	unsigned int i;
656  
657  	for (i = 0; i < tsgl->nents; i++) {
658  		struct scatterlist *sg = &tsgl->sgl_ptr[i];
659  		unsigned int len = sg->length;
660  		unsigned int offset = sg->offset;
661  		const char *actual_output;
662  
663  		if (unchecked_prefix_len) {
664  			if (unchecked_prefix_len >= len) {
665  				unchecked_prefix_len -= len;
666  				continue;
667  			}
668  			offset += unchecked_prefix_len;
669  			len -= unchecked_prefix_len;
670  			unchecked_prefix_len = 0;
671  		}
672  		len = min(len, len_to_check);
673  		actual_output = page_address(sg_page(sg)) + offset;
674  		if (memcmp(expected_output, actual_output, len) != 0)
675  			return -EINVAL;
676  		if (check_poison &&
677  		    !testmgr_is_poison(actual_output + len, TESTMGR_POISON_LEN))
678  			return -EOVERFLOW;
679  		len_to_check -= len;
680  		expected_output += len;
681  	}
682  	if (WARN_ON(len_to_check != 0))
683  		return -EINVAL;
684  	return 0;
685  }
686  
687  static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)
688  {
689  	unsigned int i;
690  
691  	for (i = 0; i < tsgl->nents; i++) {
692  		if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
693  			return true;
694  		if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
695  			return true;
696  		if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)
697  			return true;
698  	}
699  	return false;
700  }
701  
702  struct cipher_test_sglists {
703  	struct test_sglist src;
704  	struct test_sglist dst;
705  };
706  
707  static struct cipher_test_sglists *alloc_cipher_test_sglists(void)
708  {
709  	struct cipher_test_sglists *tsgls;
710  
711  	tsgls = kmalloc(sizeof(*tsgls), GFP_KERNEL);
712  	if (!tsgls)
713  		return NULL;
714  
715  	if (init_test_sglist(&tsgls->src) != 0)
716  		goto fail_kfree;
717  	if (init_test_sglist(&tsgls->dst) != 0)
718  		goto fail_destroy_src;
719  
720  	return tsgls;
721  
722  fail_destroy_src:
723  	destroy_test_sglist(&tsgls->src);
724  fail_kfree:
725  	kfree(tsgls);
726  	return NULL;
727  }
728  
729  static void free_cipher_test_sglists(struct cipher_test_sglists *tsgls)
730  {
731  	if (tsgls) {
732  		destroy_test_sglist(&tsgls->src);
733  		destroy_test_sglist(&tsgls->dst);
734  		kfree(tsgls);
735  	}
736  }
737  
738  /* Build the src and dst scatterlists for an skcipher or AEAD test */
739  static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
740  				     const struct testvec_config *cfg,
741  				     unsigned int alignmask,
742  				     unsigned int src_total_len,
743  				     unsigned int dst_total_len,
744  				     const struct kvec *inputs,
745  				     unsigned int nr_inputs)
746  {
747  	struct iov_iter input;
748  	int err;
749  
750  	iov_iter_kvec(&input, WRITE, inputs, nr_inputs, src_total_len);
751  	err = build_test_sglist(&tsgls->src, cfg->src_divs, alignmask,
752  				cfg->inplace ?
753  					max(dst_total_len, src_total_len) :
754  					src_total_len,
755  				&input, NULL);
756  	if (err)
757  		return err;
758  
759  	if (cfg->inplace) {
760  		tsgls->dst.sgl_ptr = tsgls->src.sgl;
761  		tsgls->dst.nents = tsgls->src.nents;
762  		return 0;
763  	}
764  	return build_test_sglist(&tsgls->dst,
765  				 cfg->dst_divs[0].proportion_of_total ?
766  					cfg->dst_divs : cfg->src_divs,
767  				 alignmask, dst_total_len, NULL, NULL);
768  }
769  
770  /*
771   * Support for testing passing a misaligned key to setkey():
772   *
773   * If cfg->key_offset is set, copy the key into a new buffer at that offset,
774   * optionally adding alignmask.  Else, just use the key directly.
775   */
776  static int prepare_keybuf(const u8 *key, unsigned int ksize,
777  			  const struct testvec_config *cfg,
778  			  unsigned int alignmask,
779  			  const u8 **keybuf_ret, const u8 **keyptr_ret)
780  {
781  	unsigned int key_offset = cfg->key_offset;
782  	u8 *keybuf = NULL, *keyptr = (u8 *)key;
783  
784  	if (key_offset != 0) {
785  		if (cfg->key_offset_relative_to_alignmask)
786  			key_offset += alignmask;
787  		keybuf = kmalloc(key_offset + ksize, GFP_KERNEL);
788  		if (!keybuf)
789  			return -ENOMEM;
790  		keyptr = keybuf + key_offset;
791  		memcpy(keyptr, key, ksize);
792  	}
793  	*keybuf_ret = keybuf;
794  	*keyptr_ret = keyptr;
795  	return 0;
796  }
797  
798  /* Like setkey_f(tfm, key, ksize), but sometimes misalign the key */
799  #define do_setkey(setkey_f, tfm, key, ksize, cfg, alignmask)		\
800  ({									\
801  	const u8 *keybuf, *keyptr;					\
802  	int err;							\
803  									\
804  	err = prepare_keybuf((key), (ksize), (cfg), (alignmask),	\
805  			     &keybuf, &keyptr);				\
806  	if (err == 0) {							\
807  		err = setkey_f((tfm), keyptr, (ksize));			\
808  		kfree(keybuf);						\
809  	}								\
810  	err;								\
811  })
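
/*
 * Usage example, mirroring the hash test code later in this file (this is
 * how cfg->key_offset misalignment gets exercised on the setkey path):
 *
 *	err = do_setkey(crypto_shash_setkey, tfm, vec->key, vec->ksize,
 *			cfg, alignmask);
 */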
812  
813  #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
814  
815  /* Generate a random length in range [0, max_len], but prefer smaller values */
816  static unsigned int generate_random_length(unsigned int max_len)
817  {
818  	unsigned int len = prandom_u32() % (max_len + 1);
819  
820  	switch (prandom_u32() % 4) {
821  	case 0:
822  		return len % 64;
823  	case 1:
824  		return len % 256;
825  	case 2:
826  		return len % 1024;
827  	default:
828  		return len;
829  	}
830  }
831  
832  /* Flip a random bit in the given nonempty data buffer */
833  static void flip_random_bit(u8 *buf, size_t size)
834  {
835  	size_t bitpos;
836  
837  	bitpos = prandom_u32() % (size * 8);
838  	buf[bitpos / 8] ^= 1 << (bitpos % 8);
839  }
840  
841  /* Flip a random byte in the given nonempty data buffer */
842  static void flip_random_byte(u8 *buf, size_t size)
843  {
844  	buf[prandom_u32() % size] ^= 0xff;
845  }
846  
847  /* Sometimes make some random changes to the given nonempty data buffer */
848  static void mutate_buffer(u8 *buf, size_t size)
849  {
850  	size_t num_flips;
851  	size_t i;
852  
853  	/* Sometimes flip some bits */
854  	if (prandom_u32() % 4 == 0) {
855  		num_flips = min_t(size_t, 1 << (prandom_u32() % 8), size * 8);
856  		for (i = 0; i < num_flips; i++)
857  			flip_random_bit(buf, size);
858  	}
859  
860  	/* Sometimes flip some bytes */
861  	if (prandom_u32() % 4 == 0) {
862  		num_flips = min_t(size_t, 1 << (prandom_u32() % 8), size);
863  		for (i = 0; i < num_flips; i++)
864  			flip_random_byte(buf, size);
865  	}
866  }
867  
868  /* Randomly generate 'count' bytes, but sometimes make them "interesting" */
869  static void generate_random_bytes(u8 *buf, size_t count)
870  {
871  	u8 b;
872  	u8 increment;
873  	size_t i;
874  
875  	if (count == 0)
876  		return;
877  
878  	switch (prandom_u32() % 8) { /* Choose a generation strategy */
879  	case 0:
880  	case 1:
881  		/* All the same byte, plus optional mutations */
882  		switch (prandom_u32() % 4) {
883  		case 0:
884  			b = 0x00;
885  			break;
886  		case 1:
887  			b = 0xff;
888  			break;
889  		default:
890  			b = (u8)prandom_u32();
891  			break;
892  		}
893  		memset(buf, b, count);
894  		mutate_buffer(buf, count);
895  		break;
896  	case 2:
897  		/* Ascending or descending bytes, plus optional mutations */
898  		increment = (u8)prandom_u32();
899  		b = (u8)prandom_u32();
900  		for (i = 0; i < count; i++, b += increment)
901  			buf[i] = b;
902  		mutate_buffer(buf, count);
903  		break;
904  	default:
905  		/* Fully random bytes */
906  		for (i = 0; i < count; i++)
907  			buf[i] = (u8)prandom_u32();
908  	}
909  }
910  
911  static char *generate_random_sgl_divisions(struct test_sg_division *divs,
912  					   size_t max_divs, char *p, char *end,
913  					   bool gen_flushes, u32 req_flags)
914  {
915  	struct test_sg_division *div = divs;
916  	unsigned int remaining = TEST_SG_TOTAL;
917  
918  	do {
919  		unsigned int this_len;
920  		const char *flushtype_str;
921  
922  		if (div == &divs[max_divs - 1] || prandom_u32() % 2 == 0)
923  			this_len = remaining;
924  		else
925  			this_len = 1 + (prandom_u32() % remaining);
926  		div->proportion_of_total = this_len;
927  
928  		if (prandom_u32() % 4 == 0)
929  			div->offset = (PAGE_SIZE - 128) + (prandom_u32() % 128);
930  		else if (prandom_u32() % 2 == 0)
931  			div->offset = prandom_u32() % 32;
932  		else
933  			div->offset = prandom_u32() % PAGE_SIZE;
934  		if (prandom_u32() % 8 == 0)
935  			div->offset_relative_to_alignmask = true;
936  
937  		div->flush_type = FLUSH_TYPE_NONE;
938  		if (gen_flushes) {
939  			switch (prandom_u32() % 4) {
940  			case 0:
941  				div->flush_type = FLUSH_TYPE_REIMPORT;
942  				break;
943  			case 1:
944  				div->flush_type = FLUSH_TYPE_FLUSH;
945  				break;
946  			}
947  		}
948  
949  		if (div->flush_type != FLUSH_TYPE_NONE &&
950  		    !(req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) &&
951  		    prandom_u32() % 2 == 0)
952  			div->nosimd = true;
953  
954  		switch (div->flush_type) {
955  		case FLUSH_TYPE_FLUSH:
956  			if (div->nosimd)
957  				flushtype_str = "<flush,nosimd>";
958  			else
959  				flushtype_str = "<flush>";
960  			break;
961  		case FLUSH_TYPE_REIMPORT:
962  			if (div->nosimd)
963  				flushtype_str = "<reimport,nosimd>";
964  			else
965  				flushtype_str = "<reimport>";
966  			break;
967  		default:
968  			flushtype_str = "";
969  			break;
970  		}
971  
972  		BUILD_BUG_ON(TEST_SG_TOTAL != 10000); /* for "%u.%u%%" */
973  		p += scnprintf(p, end - p, "%s%u.%u%%@%s+%u%s", flushtype_str,
974  			       this_len / 100, this_len % 100,
975  			       div->offset_relative_to_alignmask ?
976  					"alignmask" : "",
977  			       div->offset, this_len == remaining ? "" : ", ");
978  		remaining -= this_len;
979  		div++;
980  	} while (remaining);
981  
982  	return p;
983  }
984  
985  /* Generate a random testvec_config for fuzz testing */
986  static void generate_random_testvec_config(struct testvec_config *cfg,
987  					   char *name, size_t max_namelen)
988  {
989  	char *p = name;
990  	char * const end = name + max_namelen;
991  
992  	memset(cfg, 0, sizeof(*cfg));
993  
994  	cfg->name = name;
995  
996  	p += scnprintf(p, end - p, "random:");
997  
998  	if (prandom_u32() % 2 == 0) {
999  		cfg->inplace = true;
1000  		p += scnprintf(p, end - p, " inplace");
1001  	}
1002  
1003  	if (prandom_u32() % 2 == 0) {
1004  		cfg->req_flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
1005  		p += scnprintf(p, end - p, " may_sleep");
1006  	}
1007  
1008  	switch (prandom_u32() % 4) {
1009  	case 0:
1010  		cfg->finalization_type = FINALIZATION_TYPE_FINAL;
1011  		p += scnprintf(p, end - p, " use_final");
1012  		break;
1013  	case 1:
1014  		cfg->finalization_type = FINALIZATION_TYPE_FINUP;
1015  		p += scnprintf(p, end - p, " use_finup");
1016  		break;
1017  	default:
1018  		cfg->finalization_type = FINALIZATION_TYPE_DIGEST;
1019  		p += scnprintf(p, end - p, " use_digest");
1020  		break;
1021  	}
1022  
1023  	if (!(cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) &&
1024  	    prandom_u32() % 2 == 0) {
1025  		cfg->nosimd = true;
1026  		p += scnprintf(p, end - p, " nosimd");
1027  	}
1028  
1029  	p += scnprintf(p, end - p, " src_divs=[");
1030  	p = generate_random_sgl_divisions(cfg->src_divs,
1031  					  ARRAY_SIZE(cfg->src_divs), p, end,
1032  					  (cfg->finalization_type !=
1033  					   FINALIZATION_TYPE_DIGEST),
1034  					  cfg->req_flags);
1035  	p += scnprintf(p, end - p, "]");
1036  
1037  	if (!cfg->inplace && prandom_u32() % 2 == 0) {
1038  		p += scnprintf(p, end - p, " dst_divs=[");
1039  		p = generate_random_sgl_divisions(cfg->dst_divs,
1040  						  ARRAY_SIZE(cfg->dst_divs),
1041  						  p, end, false,
1042  						  cfg->req_flags);
1043  		p += scnprintf(p, end - p, "]");
1044  	}
1045  
1046  	if (prandom_u32() % 2 == 0) {
1047  		cfg->iv_offset = 1 + (prandom_u32() % MAX_ALGAPI_ALIGNMASK);
1048  		p += scnprintf(p, end - p, " iv_offset=%u", cfg->iv_offset);
1049  	}
1050  
1051  	if (prandom_u32() % 2 == 0) {
1052  		cfg->key_offset = 1 + (prandom_u32() % MAX_ALGAPI_ALIGNMASK);
1053  		p += scnprintf(p, end - p, " key_offset=%u", cfg->key_offset);
1054  	}
1055  
1056  	WARN_ON_ONCE(!valid_testvec_config(cfg));
1057  }
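
/*
 * The generated name is what appears as cfg="..." in failure messages; a
 * typical (randomly varying) example looks roughly like:
 *
 *	"random: may_sleep use_finup src_divs=[77.50%@+3001, <flush>22.50%@alignmask+16] key_offset=7"
 */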
1058  
1059  static void crypto_disable_simd_for_test(void)
1060  {
1061  	preempt_disable();
1062  	__this_cpu_write(crypto_simd_disabled_for_test, true);
1063  }
1064  
1065  static void crypto_reenable_simd_for_test(void)
1066  {
1067  	__this_cpu_write(crypto_simd_disabled_for_test, false);
1068  	preempt_enable();
1069  }
1070  
1071  /*
1072   * Given an algorithm name, build the name of the generic implementation of that
1073   * algorithm, assuming the usual naming convention.  Specifically, this appends
1074   * "-generic" to every part of the name that is not a template name.  Examples:
1075   *
1076   *	aes => aes-generic
1077   *	cbc(aes) => cbc(aes-generic)
1078   *	cts(cbc(aes)) => cts(cbc(aes-generic))
1079   *	rfc7539(chacha20,poly1305) => rfc7539(chacha20-generic,poly1305-generic)
1080   *
1081   * Return: 0 on success, or -ENAMETOOLONG if the generic name would be too long
1082   */
1083  static int build_generic_driver_name(const char *algname,
1084  				     char driver_name[CRYPTO_MAX_ALG_NAME])
1085  {
1086  	const char *in = algname;
1087  	char *out = driver_name;
1088  	size_t len = strlen(algname);
1089  
1090  	if (len >= CRYPTO_MAX_ALG_NAME)
1091  		goto too_long;
1092  	do {
1093  		const char *in_saved = in;
1094  
1095  		while (*in && *in != '(' && *in != ')' && *in != ',')
1096  			*out++ = *in++;
1097  		if (*in != '(' && in > in_saved) {
1098  			len += 8;
1099  			if (len >= CRYPTO_MAX_ALG_NAME)
1100  				goto too_long;
1101  			memcpy(out, "-generic", 8);
1102  			out += 8;
1103  		}
1104  	} while ((*out++ = *in++) != '\0');
1105  	return 0;
1106  
1107  too_long:
1108  	pr_err("alg: generic driver name for \"%s\" would be too long\n",
1109  	       algname);
1110  	return -ENAMETOOLONG;
1111  }
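
/*
 * Worked trace of build_generic_driver_name(): for "hmac(sha256)" the loop
 * copies "hmac", sees '(' and so leaves the template name alone, copies
 * "sha256", then appends "-generic" before the ')' because that part is not
 * followed by '(', giving "hmac(sha256-generic)".
 */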
1112  #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1113  static void crypto_disable_simd_for_test(void)
1114  {
1115  }
1116  
1117  static void crypto_reenable_simd_for_test(void)
1118  {
1119  }
1120  #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1121  
1122  static int build_hash_sglist(struct test_sglist *tsgl,
1123  			     const struct hash_testvec *vec,
1124  			     const struct testvec_config *cfg,
1125  			     unsigned int alignmask,
1126  			     const struct test_sg_division *divs[XBUFSIZE])
1127  {
1128  	struct kvec kv;
1129  	struct iov_iter input;
1130  
1131  	kv.iov_base = (void *)vec->plaintext;
1132  	kv.iov_len = vec->psize;
1133  	iov_iter_kvec(&input, WRITE, &kv, 1, vec->psize);
1134  	return build_test_sglist(tsgl, cfg->src_divs, alignmask, vec->psize,
1135  				 &input, divs);
1136  }
1137  
1138  static int check_hash_result(const char *type,
1139  			     const u8 *result, unsigned int digestsize,
1140  			     const struct hash_testvec *vec,
1141  			     const char *vec_name,
1142  			     const char *driver,
1143  			     const struct testvec_config *cfg)
1144  {
1145  	if (memcmp(result, vec->digest, digestsize) != 0) {
1146  		pr_err("alg: %s: %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
1147  		       type, driver, vec_name, cfg->name);
1148  		return -EINVAL;
1149  	}
1150  	if (!testmgr_is_poison(&result[digestsize], TESTMGR_POISON_LEN)) {
1151  		pr_err("alg: %s: %s overran result buffer on test vector %s, cfg=\"%s\"\n",
1152  		       type, driver, vec_name, cfg->name);
1153  		return -EOVERFLOW;
1154  	}
1155  	return 0;
1156  }
1157  
1158  static inline int check_shash_op(const char *op, int err,
1159  				 const char *driver, const char *vec_name,
1160  				 const struct testvec_config *cfg)
1161  {
1162  	if (err)
1163  		pr_err("alg: shash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
1164  		       driver, op, err, vec_name, cfg->name);
1165  	return err;
1166  }
1167  
1168  static inline const void *sg_data(struct scatterlist *sg)
1169  {
1170  	return page_address(sg_page(sg)) + sg->offset;
1171  }
1172  
1173  /* Test one hash test vector in one configuration, using the shash API */
1174  static int test_shash_vec_cfg(const char *driver,
1175  			      const struct hash_testvec *vec,
1176  			      const char *vec_name,
1177  			      const struct testvec_config *cfg,
1178  			      struct shash_desc *desc,
1179  			      struct test_sglist *tsgl,
1180  			      u8 *hashstate)
1181  {
1182  	struct crypto_shash *tfm = desc->tfm;
1183  	const unsigned int alignmask = crypto_shash_alignmask(tfm);
1184  	const unsigned int digestsize = crypto_shash_digestsize(tfm);
1185  	const unsigned int statesize = crypto_shash_statesize(tfm);
1186  	const struct test_sg_division *divs[XBUFSIZE];
1187  	unsigned int i;
1188  	u8 result[HASH_MAX_DIGESTSIZE + TESTMGR_POISON_LEN];
1189  	int err;
1190  
1191  	/* Set the key, if specified */
1192  	if (vec->ksize) {
1193  		err = do_setkey(crypto_shash_setkey, tfm, vec->key, vec->ksize,
1194  				cfg, alignmask);
1195  		if (err) {
1196  			if (err == vec->setkey_error)
1197  				return 0;
1198  			pr_err("alg: shash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
1199  			       driver, vec_name, vec->setkey_error, err,
1200  			       crypto_shash_get_flags(tfm));
1201  			return err;
1202  		}
1203  		if (vec->setkey_error) {
1204  			pr_err("alg: shash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
1205  			       driver, vec_name, vec->setkey_error);
1206  			return -EINVAL;
1207  		}
1208  	}
1209  
1210  	/* Build the scatterlist for the source data */
1211  	err = build_hash_sglist(tsgl, vec, cfg, alignmask, divs);
1212  	if (err) {
1213  		pr_err("alg: shash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
1214  		       driver, vec_name, cfg->name);
1215  		return err;
1216  	}
1217  
1218  	/* Do the actual hashing */
1219  
1220  	testmgr_poison(desc->__ctx, crypto_shash_descsize(tfm));
1221  	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);
1222  
1223  	if (cfg->finalization_type == FINALIZATION_TYPE_DIGEST ||
1224  	    vec->digest_error) {
1225  		/* Just using digest() */
1226  		if (tsgl->nents != 1)
1227  			return 0;
1228  		if (cfg->nosimd)
1229  			crypto_disable_simd_for_test();
1230  		err = crypto_shash_digest(desc, sg_data(&tsgl->sgl[0]),
1231  					  tsgl->sgl[0].length, result);
1232  		if (cfg->nosimd)
1233  			crypto_reenable_simd_for_test();
1234  		if (err) {
1235  			if (err == vec->digest_error)
1236  				return 0;
1237  			pr_err("alg: shash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
1238  			       driver, vec_name, vec->digest_error, err,
1239  			       cfg->name);
1240  			return err;
1241  		}
1242  		if (vec->digest_error) {
1243  			pr_err("alg: shash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
1244  			       driver, vec_name, vec->digest_error, cfg->name);
1245  			return -EINVAL;
1246  		}
1247  		goto result_ready;
1248  	}
1249  
1250  	/* Using init(), zero or more update(), then final() or finup() */
1251  
1252  	if (cfg->nosimd)
1253  		crypto_disable_simd_for_test();
1254  	err = crypto_shash_init(desc);
1255  	if (cfg->nosimd)
1256  		crypto_reenable_simd_for_test();
1257  	err = check_shash_op("init", err, driver, vec_name, cfg);
1258  	if (err)
1259  		return err;
1260  
1261  	for (i = 0; i < tsgl->nents; i++) {
1262  		if (i + 1 == tsgl->nents &&
1263  		    cfg->finalization_type == FINALIZATION_TYPE_FINUP) {
1264  			if (divs[i]->nosimd)
1265  				crypto_disable_simd_for_test();
1266  			err = crypto_shash_finup(desc, sg_data(&tsgl->sgl[i]),
1267  						 tsgl->sgl[i].length, result);
1268  			if (divs[i]->nosimd)
1269  				crypto_reenable_simd_for_test();
1270  			err = check_shash_op("finup", err, driver, vec_name,
1271  					     cfg);
1272  			if (err)
1273  				return err;
1274  			goto result_ready;
1275  		}
1276  		if (divs[i]->nosimd)
1277  			crypto_disable_simd_for_test();
1278  		err = crypto_shash_update(desc, sg_data(&tsgl->sgl[i]),
1279  					  tsgl->sgl[i].length);
1280  		if (divs[i]->nosimd)
1281  			crypto_reenable_simd_for_test();
1282  		err = check_shash_op("update", err, driver, vec_name, cfg);
1283  		if (err)
1284  			return err;
1285  		if (divs[i]->flush_type == FLUSH_TYPE_REIMPORT) {
1286  			/* Test ->export() and ->import() */
1287  			testmgr_poison(hashstate + statesize,
1288  				       TESTMGR_POISON_LEN);
1289  			err = crypto_shash_export(desc, hashstate);
1290  			err = check_shash_op("export", err, driver, vec_name,
1291  					     cfg);
1292  			if (err)
1293  				return err;
1294  			if (!testmgr_is_poison(hashstate + statesize,
1295  					       TESTMGR_POISON_LEN)) {
1296  				pr_err("alg: shash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
1297  				       driver, vec_name, cfg->name);
1298  				return -EOVERFLOW;
1299  			}
1300  			testmgr_poison(desc->__ctx, crypto_shash_descsize(tfm));
1301  			err = crypto_shash_import(desc, hashstate);
1302  			err = check_shash_op("import", err, driver, vec_name,
1303  					     cfg);
1304  			if (err)
1305  				return err;
1306  		}
1307  	}
1308  
1309  	if (cfg->nosimd)
1310  		crypto_disable_simd_for_test();
1311  	err = crypto_shash_final(desc, result);
1312  	if (cfg->nosimd)
1313  		crypto_reenable_simd_for_test();
1314  	err = check_shash_op("final", err, driver, vec_name, cfg);
1315  	if (err)
1316  		return err;
1317  result_ready:
1318  	return check_hash_result("shash", result, digestsize, vec, vec_name,
1319  				 driver, cfg);
1320  }
1321  
1322  static int do_ahash_op(int (*op)(struct ahash_request *req),
1323  		       struct ahash_request *req,
1324  		       struct crypto_wait *wait, bool nosimd)
1325  {
1326  	int err;
1327  
1328  	if (nosimd)
1329  		crypto_disable_simd_for_test();
1330  
1331  	err = op(req);
1332  
1333  	if (nosimd)
1334  		crypto_reenable_simd_for_test();
1335  
1336  	return crypto_wait_req(err, wait);
1337  }
1338  
1339  static int check_nonfinal_ahash_op(const char *op, int err,
1340  				   u8 *result, unsigned int digestsize,
1341  				   const char *driver, const char *vec_name,
1342  				   const struct testvec_config *cfg)
1343  {
1344  	if (err) {
1345  		pr_err("alg: ahash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
1346  		       driver, op, err, vec_name, cfg->name);
1347  		return err;
1348  	}
1349  	if (!testmgr_is_poison(result, digestsize)) {
1350  		pr_err("alg: ahash: %s %s() used result buffer on test vector %s, cfg=\"%s\"\n",
1351  		       driver, op, vec_name, cfg->name);
1352  		return -EINVAL;
1353  	}
1354  	return 0;
1355  }
1356  
1357  /* Test one hash test vector in one configuration, using the ahash API */
1358  static int test_ahash_vec_cfg(const char *driver,
1359  			      const struct hash_testvec *vec,
1360  			      const char *vec_name,
1361  			      const struct testvec_config *cfg,
1362  			      struct ahash_request *req,
1363  			      struct test_sglist *tsgl,
1364  			      u8 *hashstate)
1365  {
1366  	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1367  	const unsigned int alignmask = crypto_ahash_alignmask(tfm);
1368  	const unsigned int digestsize = crypto_ahash_digestsize(tfm);
1369  	const unsigned int statesize = crypto_ahash_statesize(tfm);
1370  	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
1371  	const struct test_sg_division *divs[XBUFSIZE];
1372  	DECLARE_CRYPTO_WAIT(wait);
1373  	unsigned int i;
1374  	struct scatterlist *pending_sgl;
1375  	unsigned int pending_len;
1376  	u8 result[HASH_MAX_DIGESTSIZE + TESTMGR_POISON_LEN];
1377  	int err;
1378  
1379  	/* Set the key, if specified */
1380  	if (vec->ksize) {
1381  		err = do_setkey(crypto_ahash_setkey, tfm, vec->key, vec->ksize,
1382  				cfg, alignmask);
1383  		if (err) {
1384  			if (err == vec->setkey_error)
1385  				return 0;
1386  			pr_err("alg: ahash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
1387  			       driver, vec_name, vec->setkey_error, err,
1388  			       crypto_ahash_get_flags(tfm));
1389  			return err;
1390  		}
1391  		if (vec->setkey_error) {
1392  			pr_err("alg: ahash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
1393  			       driver, vec_name, vec->setkey_error);
1394  			return -EINVAL;
1395  		}
1396  	}
1397  
1398  	/* Build the scatterlist for the source data */
1399  	err = build_hash_sglist(tsgl, vec, cfg, alignmask, divs);
1400  	if (err) {
1401  		pr_err("alg: ahash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
1402  		       driver, vec_name, cfg->name);
1403  		return err;
1404  	}
1405  
1406  	/* Do the actual hashing */
1407  
1408  	testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
1409  	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);
1410  
1411  	if (cfg->finalization_type == FINALIZATION_TYPE_DIGEST ||
1412  	    vec->digest_error) {
1413  		/* Just using digest() */
1414  		ahash_request_set_callback(req, req_flags, crypto_req_done,
1415  					   &wait);
1416  		ahash_request_set_crypt(req, tsgl->sgl, result, vec->psize);
1417  		err = do_ahash_op(crypto_ahash_digest, req, &wait, cfg->nosimd);
1418  		if (err) {
1419  			if (err == vec->digest_error)
1420  				return 0;
1421  			pr_err("alg: ahash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
1422  			       driver, vec_name, vec->digest_error, err,
1423  			       cfg->name);
1424  			return err;
1425  		}
1426  		if (vec->digest_error) {
1427  			pr_err("alg: ahash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
1428  			       driver, vec_name, vec->digest_error, cfg->name);
1429  			return -EINVAL;
1430  		}
1431  		goto result_ready;
1432  	}
1433  
1434  	/* Using init(), zero or more update(), then final() or finup() */
1435  
1436  	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
1437  	ahash_request_set_crypt(req, NULL, result, 0);
1438  	err = do_ahash_op(crypto_ahash_init, req, &wait, cfg->nosimd);
1439  	err = check_nonfinal_ahash_op("init", err, result, digestsize,
1440  				      driver, vec_name, cfg);
1441  	if (err)
1442  		return err;
1443  
1444  	pending_sgl = NULL;
1445  	pending_len = 0;
1446  	for (i = 0; i < tsgl->nents; i++) {
1447  		if (divs[i]->flush_type != FLUSH_TYPE_NONE &&
1448  		    pending_sgl != NULL) {
1449  			/* update() with the pending data */
1450  			ahash_request_set_callback(req, req_flags,
1451  						   crypto_req_done, &wait);
1452  			ahash_request_set_crypt(req, pending_sgl, result,
1453  						pending_len);
1454  			err = do_ahash_op(crypto_ahash_update, req, &wait,
1455  					  divs[i]->nosimd);
1456  			err = check_nonfinal_ahash_op("update", err,
1457  						      result, digestsize,
1458  						      driver, vec_name, cfg);
1459  			if (err)
1460  				return err;
1461  			pending_sgl = NULL;
1462  			pending_len = 0;
1463  		}
1464  		if (divs[i]->flush_type == FLUSH_TYPE_REIMPORT) {
1465  			/* Test ->export() and ->import() */
1466  			testmgr_poison(hashstate + statesize,
1467  				       TESTMGR_POISON_LEN);
1468  			err = crypto_ahash_export(req, hashstate);
1469  			err = check_nonfinal_ahash_op("export", err,
1470  						      result, digestsize,
1471  						      driver, vec_name, cfg);
1472  			if (err)
1473  				return err;
1474  			if (!testmgr_is_poison(hashstate + statesize,
1475  					       TESTMGR_POISON_LEN)) {
1476  				pr_err("alg: ahash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
1477  				       driver, vec_name, cfg->name);
1478  				return -EOVERFLOW;
1479  			}
1480  
1481  			testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
1482  			err = crypto_ahash_import(req, hashstate);
1483  			err = check_nonfinal_ahash_op("import", err,
1484  						      result, digestsize,
1485  						      driver, vec_name, cfg);
1486  			if (err)
1487  				return err;
1488  		}
1489  		if (pending_sgl == NULL)
1490  			pending_sgl = &tsgl->sgl[i];
1491  		pending_len += tsgl->sgl[i].length;
1492  	}
1493  
1494  	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
1495  	ahash_request_set_crypt(req, pending_sgl, result, pending_len);
1496  	if (cfg->finalization_type == FINALIZATION_TYPE_FINAL) {
1497  		/* finish with update() and final() */
1498  		err = do_ahash_op(crypto_ahash_update, req, &wait, cfg->nosimd);
1499  		err = check_nonfinal_ahash_op("update", err, result, digestsize,
1500  					      driver, vec_name, cfg);
1501  		if (err)
1502  			return err;
1503  		err = do_ahash_op(crypto_ahash_final, req, &wait, cfg->nosimd);
1504  		if (err) {
1505  			pr_err("alg: ahash: %s final() failed with err %d on test vector %s, cfg=\"%s\"\n",
1506  			       driver, err, vec_name, cfg->name);
1507  			return err;
1508  		}
1509  	} else {
1510  		/* finish with finup() */
1511  		err = do_ahash_op(crypto_ahash_finup, req, &wait, cfg->nosimd);
1512  		if (err) {
1513  			pr_err("alg: ahash: %s finup() failed with err %d on test vector %s, cfg=\"%s\"\n",
1514  			       driver, err, vec_name, cfg->name);
1515  			return err;
1516  		}
1517  	}
1518  
1519  result_ready:
1520  	return check_hash_result("ahash", result, digestsize, vec, vec_name,
1521  				 driver, cfg);
1522  }
1523  
1524  static int test_hash_vec_cfg(const char *driver,
1525  			     const struct hash_testvec *vec,
1526  			     const char *vec_name,
1527  			     const struct testvec_config *cfg,
1528  			     struct ahash_request *req,
1529  			     struct shash_desc *desc,
1530  			     struct test_sglist *tsgl,
1531  			     u8 *hashstate)
1532  {
1533  	int err;
1534  
1535  	/*
1536  	 * For algorithms implemented as "shash", most bugs will be detected by
1537  	 * both the shash and ahash tests.  Test the shash API first so that
1538  	 * failures involve less indirection and are therefore easier to debug.
1539  	 */
1540  
1541  	if (desc) {
1542  		err = test_shash_vec_cfg(driver, vec, vec_name, cfg, desc, tsgl,
1543  					 hashstate);
1544  		if (err)
1545  			return err;
1546  	}
1547  
1548  	return test_ahash_vec_cfg(driver, vec, vec_name, cfg, req, tsgl,
1549  				  hashstate);
1550  }
1551  
1552  static int test_hash_vec(const char *driver, const struct hash_testvec *vec,
1553  			 unsigned int vec_num, struct ahash_request *req,
1554  			 struct shash_desc *desc, struct test_sglist *tsgl,
1555  			 u8 *hashstate)
1556  {
1557  	char vec_name[16];
1558  	unsigned int i;
1559  	int err;
1560  
1561  	sprintf(vec_name, "%u", vec_num);
1562  
1563  	for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++) {
1564  		err = test_hash_vec_cfg(driver, vec, vec_name,
1565  					&default_hash_testvec_configs[i],
1566  					req, desc, tsgl, hashstate);
1567  		if (err)
1568  			return err;
1569  	}
1570  
1571  #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1572  	if (!noextratests) {
1573  		struct testvec_config cfg;
1574  		char cfgname[TESTVEC_CONFIG_NAMELEN];
1575  
1576  		for (i = 0; i < fuzz_iterations; i++) {
1577  			generate_random_testvec_config(&cfg, cfgname,
1578  						       sizeof(cfgname));
1579  			err = test_hash_vec_cfg(driver, vec, vec_name, &cfg,
1580  						req, desc, tsgl, hashstate);
1581  			if (err)
1582  				return err;
1583  			cond_resched();
1584  		}
1585  	}
1586  #endif
1587  	return 0;
1588  }
1589  
1590  #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1591  /*
1592   * Generate a hash test vector from the given implementation.
1593   * Assumes the buffers in 'vec' were already allocated.
1594   */
1595  static void generate_random_hash_testvec(struct shash_desc *desc,
1596  					 struct hash_testvec *vec,
1597  					 unsigned int maxkeysize,
1598  					 unsigned int maxdatasize,
1599  					 char *name, size_t max_namelen)
1600  {
1601  	/* Data */
1602  	vec->psize = generate_random_length(maxdatasize);
1603  	generate_random_bytes((u8 *)vec->plaintext, vec->psize);
1604  
1605  	/*
1606  	 * Key: length in range [1, maxkeysize], but usually choose maxkeysize.
1607  	 * If algorithm is unkeyed, then maxkeysize == 0 and set ksize = 0.
1608  	 */
1609  	vec->setkey_error = 0;
1610  	vec->ksize = 0;
1611  	if (maxkeysize) {
1612  		vec->ksize = maxkeysize;
1613  		if (prandom_u32() % 4 == 0)
1614  			vec->ksize = 1 + (prandom_u32() % maxkeysize);
1615  		generate_random_bytes((u8 *)vec->key, vec->ksize);
1616  
1617  		vec->setkey_error = crypto_shash_setkey(desc->tfm, vec->key,
1618  							vec->ksize);
1619  		/* If the key couldn't be set, no need to continue to digest. */
1620  		if (vec->setkey_error)
1621  			goto done;
1622  	}
1623  
1624  	/* Digest */
1625  	vec->digest_error = crypto_shash_digest(desc, vec->plaintext,
1626  						vec->psize, (u8 *)vec->digest);
1627  done:
1628  	snprintf(name, max_namelen, "\"random: psize=%u ksize=%u\"",
1629  		 vec->psize, vec->ksize);
1630  }
1631  
1632  /*
1633   * Test the hash algorithm represented by @req against the corresponding generic
1634   * implementation, if one is available.
1635   */
1636  static int test_hash_vs_generic_impl(const char *driver,
1637  				     const char *generic_driver,
1638  				     unsigned int maxkeysize,
1639  				     struct ahash_request *req,
1640  				     struct shash_desc *desc,
1641  				     struct test_sglist *tsgl,
1642  				     u8 *hashstate)
1643  {
1644  	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1645  	const unsigned int digestsize = crypto_ahash_digestsize(tfm);
1646  	const unsigned int blocksize = crypto_ahash_blocksize(tfm);
1647  	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
1648  	const char *algname = crypto_hash_alg_common(tfm)->base.cra_name;
1649  	char _generic_driver[CRYPTO_MAX_ALG_NAME];
1650  	struct crypto_shash *generic_tfm = NULL;
1651  	struct shash_desc *generic_desc = NULL;
1652  	unsigned int i;
1653  	struct hash_testvec vec = { 0 };
1654  	char vec_name[64];
1655  	struct testvec_config *cfg;
1656  	char cfgname[TESTVEC_CONFIG_NAMELEN];
1657  	int err;
1658  
1659  	if (noextratests)
1660  		return 0;
1661  
1662  	if (!generic_driver) { /* Use default naming convention? */
1663  		err = build_generic_driver_name(algname, _generic_driver);
1664  		if (err)
1665  			return err;
1666  		generic_driver = _generic_driver;
1667  	}
1668  
1669  	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
1670  		return 0;
1671  
1672  	generic_tfm = crypto_alloc_shash(generic_driver, 0, 0);
1673  	if (IS_ERR(generic_tfm)) {
1674  		err = PTR_ERR(generic_tfm);
1675  		if (err == -ENOENT) {
1676  			pr_warn("alg: hash: skipping comparison tests for %s because %s is unavailable\n",
1677  				driver, generic_driver);
1678  			return 0;
1679  		}
1680  		pr_err("alg: hash: error allocating %s (generic impl of %s): %d\n",
1681  		       generic_driver, algname, err);
1682  		return err;
1683  	}
1684  
1685  	cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
1686  	if (!cfg) {
1687  		err = -ENOMEM;
1688  		goto out;
1689  	}
1690  
1691  	generic_desc = kzalloc(sizeof(*desc) +
1692  			       crypto_shash_descsize(generic_tfm), GFP_KERNEL);
1693  	if (!generic_desc) {
1694  		err = -ENOMEM;
1695  		goto out;
1696  	}
1697  	generic_desc->tfm = generic_tfm;
1698  
1699  	/* Check the algorithm properties for consistency. */
1700  
1701  	if (digestsize != crypto_shash_digestsize(generic_tfm)) {
1702  		pr_err("alg: hash: digestsize for %s (%u) doesn't match generic impl (%u)\n",
1703  		       driver, digestsize,
1704  		       crypto_shash_digestsize(generic_tfm));
1705  		err = -EINVAL;
1706  		goto out;
1707  	}
1708  
1709  	if (blocksize != crypto_shash_blocksize(generic_tfm)) {
1710  		pr_err("alg: hash: blocksize for %s (%u) doesn't match generic impl (%u)\n",
1711  		       driver, blocksize, crypto_shash_blocksize(generic_tfm));
1712  		err = -EINVAL;
1713  		goto out;
1714  	}
1715  
1716  	/*
1717  	 * Now generate test vectors using the generic implementation, and test
1718  	 * the other implementation against them.
1719  	 */
1720  
1721  	vec.key = kmalloc(maxkeysize, GFP_KERNEL);
1722  	vec.plaintext = kmalloc(maxdatasize, GFP_KERNEL);
1723  	vec.digest = kmalloc(digestsize, GFP_KERNEL);
1724  	if (!vec.key || !vec.plaintext || !vec.digest) {
1725  		err = -ENOMEM;
1726  		goto out;
1727  	}
1728  
1729  	for (i = 0; i < fuzz_iterations * 8; i++) {
1730  		generate_random_hash_testvec(generic_desc, &vec,
1731  					     maxkeysize, maxdatasize,
1732  					     vec_name, sizeof(vec_name));
1733  		generate_random_testvec_config(cfg, cfgname, sizeof(cfgname));
1734  
1735  		err = test_hash_vec_cfg(driver, &vec, vec_name, cfg,
1736  					req, desc, tsgl, hashstate);
1737  		if (err)
1738  			goto out;
1739  		cond_resched();
1740  	}
1741  	err = 0;
1742  out:
1743  	kfree(cfg);
1744  	kfree(vec.key);
1745  	kfree(vec.plaintext);
1746  	kfree(vec.digest);
1747  	crypto_free_shash(generic_tfm);
1748  	kfree_sensitive(generic_desc);
1749  	return err;
1750  }
1751  #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1752  static int test_hash_vs_generic_impl(const char *driver,
1753  				     const char *generic_driver,
1754  				     unsigned int maxkeysize,
1755  				     struct ahash_request *req,
1756  				     struct shash_desc *desc,
1757  				     struct test_sglist *tsgl,
1758  				     u8 *hashstate)
1759  {
1760  	return 0;
1761  }
1762  #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1763  
1764  static int alloc_shash(const char *driver, u32 type, u32 mask,
1765  		       struct crypto_shash **tfm_ret,
1766  		       struct shash_desc **desc_ret)
1767  {
1768  	struct crypto_shash *tfm;
1769  	struct shash_desc *desc;
1770  
1771  	tfm = crypto_alloc_shash(driver, type, mask);
1772  	if (IS_ERR(tfm)) {
1773  		if (PTR_ERR(tfm) == -ENOENT) {
1774  			/*
1775  			 * This algorithm is only available through the ahash
1776  			 * API, not the shash API, so skip the shash tests.
1777  			 */
1778  			return 0;
1779  		}
1780  		pr_err("alg: hash: failed to allocate shash transform for %s: %ld\n",
1781  		       driver, PTR_ERR(tfm));
1782  		return PTR_ERR(tfm);
1783  	}
1784  
1785  	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
1786  	if (!desc) {
1787  		crypto_free_shash(tfm);
1788  		return -ENOMEM;
1789  	}
1790  	desc->tfm = tfm;
1791  
1792  	*tfm_ret = tfm;
1793  	*desc_ret = desc;
1794  	return 0;
1795  }
1796  
1797  static int __alg_test_hash(const struct hash_testvec *vecs,
1798  			   unsigned int num_vecs, const char *driver,
1799  			   u32 type, u32 mask,
1800  			   const char *generic_driver, unsigned int maxkeysize)
1801  {
1802  	struct crypto_ahash *atfm = NULL;
1803  	struct ahash_request *req = NULL;
1804  	struct crypto_shash *stfm = NULL;
1805  	struct shash_desc *desc = NULL;
1806  	struct test_sglist *tsgl = NULL;
1807  	u8 *hashstate = NULL;
1808  	unsigned int statesize;
1809  	unsigned int i;
1810  	int err;
1811  
1812  	/*
1813  	 * Always test the ahash API.  This works regardless of whether the
1814  	 * algorithm is implemented as ahash or shash.
1815  	 */
1816  
1817  	atfm = crypto_alloc_ahash(driver, type, mask);
1818  	if (IS_ERR(atfm)) {
1819  		pr_err("alg: hash: failed to allocate transform for %s: %ld\n",
1820  		       driver, PTR_ERR(atfm));
1821  		return PTR_ERR(atfm);
1822  	}
1823  
1824  	req = ahash_request_alloc(atfm, GFP_KERNEL);
1825  	if (!req) {
1826  		pr_err("alg: hash: failed to allocate request for %s\n",
1827  		       driver);
1828  		err = -ENOMEM;
1829  		goto out;
1830  	}
1831  
1832  	/*
1833  	 * If available, also test the shash API, to cover corner cases that may
1834  	 * be missed by testing the ahash API only.
1835  	 */
1836  	err = alloc_shash(driver, type, mask, &stfm, &desc);
1837  	if (err)
1838  		goto out;
1839  
1840  	tsgl = kmalloc(sizeof(*tsgl), GFP_KERNEL);
1841  	if (!tsgl || init_test_sglist(tsgl) != 0) {
1842  		pr_err("alg: hash: failed to allocate test buffers for %s\n",
1843  		       driver);
1844  		kfree(tsgl);
1845  		tsgl = NULL;
1846  		err = -ENOMEM;
1847  		goto out;
1848  	}
1849  
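	/*
	 * The export/import state buffer is shared by the ahash and shash
	 * tests, so size it for the larger of the two state sizes, plus
	 * TESTMGR_POISON_LEN extra bytes so overruns of the exported state
	 * can be detected.
	 */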
1850  	statesize = crypto_ahash_statesize(atfm);
1851  	if (stfm)
1852  		statesize = max(statesize, crypto_shash_statesize(stfm));
1853  	hashstate = kmalloc(statesize + TESTMGR_POISON_LEN, GFP_KERNEL);
1854  	if (!hashstate) {
1855  		pr_err("alg: hash: failed to allocate hash state buffer for %s\n",
1856  		       driver);
1857  		err = -ENOMEM;
1858  		goto out;
1859  	}
1860  
1861  	for (i = 0; i < num_vecs; i++) {
1862  		err = test_hash_vec(driver, &vecs[i], i, req, desc, tsgl,
1863  				    hashstate);
1864  		if (err)
1865  			goto out;
1866  		cond_resched();
1867  	}
1868  	err = test_hash_vs_generic_impl(driver, generic_driver, maxkeysize, req,
1869  					desc, tsgl, hashstate);
1870  out:
1871  	kfree(hashstate);
1872  	if (tsgl) {
1873  		destroy_test_sglist(tsgl);
1874  		kfree(tsgl);
1875  	}
1876  	kfree(desc);
1877  	crypto_free_shash(stfm);
1878  	ahash_request_free(req);
1879  	crypto_free_ahash(atfm);
1880  	return err;
1881  }
1882  
1883  static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1884  			 u32 type, u32 mask)
1885  {
1886  	const struct hash_testvec *template = desc->suite.hash.vecs;
1887  	unsigned int tcount = desc->suite.hash.count;
1888  	unsigned int nr_unkeyed, nr_keyed;
1889  	unsigned int maxkeysize = 0;
1890  	int err;
1891  
1892  	/*
1893  	 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
1894  	 * first, before setting a key on the tfm.  To make this easier, we
1895  	 * require that the unkeyed test vectors (if any) are listed first.
1896  	 */
1897  
1898  	for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
1899  		if (template[nr_unkeyed].ksize)
1900  			break;
1901  	}
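	/*
	 * All remaining vectors must be keyed.  Also track the largest key
	 * length, which the extra (comparison/fuzz) tests use when generating
	 * random keys.
	 */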
1902  	for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
1903  		if (!template[nr_unkeyed + nr_keyed].ksize) {
1904  			pr_err("alg: hash: test vectors for %s out of order, "
1905  			       "unkeyed ones must come first\n", desc->alg);
1906  			return -EINVAL;
1907  		}
1908  		maxkeysize = max_t(unsigned int, maxkeysize,
1909  				   template[nr_unkeyed + nr_keyed].ksize);
1910  	}
1911  
1912  	err = 0;
1913  	if (nr_unkeyed) {
1914  		err = __alg_test_hash(template, nr_unkeyed, driver, type, mask,
1915  				      desc->generic_driver, maxkeysize);
1916  		template += nr_unkeyed;
1917  	}
1918  
1919  	if (!err && nr_keyed)
1920  		err = __alg_test_hash(template, nr_keyed, driver, type, mask,
1921  				      desc->generic_driver, maxkeysize);
1922  
1923  	return err;
1924  }
1925  
1926  static int test_aead_vec_cfg(const char *driver, int enc,
1927  			     const struct aead_testvec *vec,
1928  			     const char *vec_name,
1929  			     const struct testvec_config *cfg,
1930  			     struct aead_request *req,
1931  			     struct cipher_test_sglists *tsgls)
1932  {
1933  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1934  	const unsigned int alignmask = crypto_aead_alignmask(tfm);
1935  	const unsigned int ivsize = crypto_aead_ivsize(tfm);
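	/* vec->clen includes the authentication tag, vec->plen does not. */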
1936  	const unsigned int authsize = vec->clen - vec->plen;
1937  	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
1938  	const char *op = enc ? "encryption" : "decryption";
1939  	DECLARE_CRYPTO_WAIT(wait);
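	/*
	 * Oversized IV buffer: the IV is placed at an offset chosen by the
	 * test config so that misaligned IV pointers get exercised too.
	 */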
1940  	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
1941  	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
1942  		 cfg->iv_offset +
1943  		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
1944  	struct kvec input[2];
1945  	int err;
1946  
1947  	/* Set the key */
1948  	if (vec->wk)
1949  		crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
1950  	else
1951  		crypto_aead_clear_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
1952  
1953  	err = do_setkey(crypto_aead_setkey, tfm, vec->key, vec->klen,
1954  			cfg, alignmask);
1955  	if (err && err != vec->setkey_error) {
1956  		pr_err("alg: aead: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
1957  		       driver, vec_name, vec->setkey_error, err,
1958  		       crypto_aead_get_flags(tfm));
1959  		return err;
1960  	}
1961  	if (!err && vec->setkey_error) {
1962  		pr_err("alg: aead: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
1963  		       driver, vec_name, vec->setkey_error);
1964  		return -EINVAL;
1965  	}
1966  
1967  	/* Set the authentication tag size */
1968  	err = crypto_aead_setauthsize(tfm, authsize);
1969  	if (err && err != vec->setauthsize_error) {
1970  		pr_err("alg: aead: %s setauthsize failed on test vector %s; expected_error=%d, actual_error=%d\n",
1971  		       driver, vec_name, vec->setauthsize_error, err);
1972  		return err;
1973  	}
1974  	if (!err && vec->setauthsize_error) {
1975  		pr_err("alg: aead: %s setauthsize unexpectedly succeeded on test vector %s; expected_error=%d\n",
1976  		       driver, vec_name, vec->setauthsize_error);
1977  		return -EINVAL;
1978  	}
1979  
1980  	if (vec->setkey_error || vec->setauthsize_error)
1981  		return 0;
1982  
1983  	/* The IV must be copied to a buffer, as the algorithm may modify it */
1984  	if (WARN_ON(ivsize > MAX_IVLEN))
1985  		return -EINVAL;
1986  	if (vec->iv)
1987  		memcpy(iv, vec->iv, ivsize);
1988  	else
1989  		memset(iv, 0, ivsize);
1990  
1991  	/* Build the src/dst scatterlists */
1992  	input[0].iov_base = (void *)vec->assoc;
1993  	input[0].iov_len = vec->alen;
1994  	input[1].iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
1995  	input[1].iov_len = enc ? vec->plen : vec->clen;
1996  	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
1997  					vec->alen + (enc ? vec->plen :
1998  						     vec->clen),
1999  					vec->alen + (enc ? vec->clen :
2000  						     vec->plen),
2001  					input, 2);
2002  	if (err) {
2003  		pr_err("alg: aead: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
2004  		       driver, op, vec_name, cfg->name);
2005  		return err;
2006  	}
2007  
2008  	/* Do the actual encryption or decryption */
2009  	testmgr_poison(req->__ctx, crypto_aead_reqsize(tfm));
2010  	aead_request_set_callback(req, req_flags, crypto_req_done, &wait);
2011  	aead_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
2012  			       enc ? vec->plen : vec->clen, iv);
2013  	aead_request_set_ad(req, vec->alen);
2014  	if (cfg->nosimd)
2015  		crypto_disable_simd_for_test();
2016  	err = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
2017  	if (cfg->nosimd)
2018  		crypto_reenable_simd_for_test();
2019  	err = crypto_wait_req(err, &wait);
2020  
2021  	/* Check that the algorithm didn't overwrite things it shouldn't have */
2022  	if (req->cryptlen != (enc ? vec->plen : vec->clen) ||
2023  	    req->assoclen != vec->alen ||
2024  	    req->iv != iv ||
2025  	    req->src != tsgls->src.sgl_ptr ||
2026  	    req->dst != tsgls->dst.sgl_ptr ||
2027  	    crypto_aead_reqtfm(req) != tfm ||
2028  	    req->base.complete != crypto_req_done ||
2029  	    req->base.flags != req_flags ||
2030  	    req->base.data != &wait) {
2031  		pr_err("alg: aead: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
2032  		       driver, op, vec_name, cfg->name);
2033  		if (req->cryptlen != (enc ? vec->plen : vec->clen))
2034  			pr_err("alg: aead: changed 'req->cryptlen'\n");
2035  		if (req->assoclen != vec->alen)
2036  			pr_err("alg: aead: changed 'req->assoclen'\n");
2037  		if (req->iv != iv)
2038  			pr_err("alg: aead: changed 'req->iv'\n");
2039  		if (req->src != tsgls->src.sgl_ptr)
2040  			pr_err("alg: aead: changed 'req->src'\n");
2041  		if (req->dst != tsgls->dst.sgl_ptr)
2042  			pr_err("alg: aead: changed 'req->dst'\n");
2043  		if (crypto_aead_reqtfm(req) != tfm)
2044  			pr_err("alg: aead: changed 'req->base.tfm'\n");
2045  		if (req->base.complete != crypto_req_done)
2046  			pr_err("alg: aead: changed 'req->base.complete'\n");
2047  		if (req->base.flags != req_flags)
2048  			pr_err("alg: aead: changed 'req->base.flags'\n");
2049  		if (req->base.data != &wait)
2050  			pr_err("alg: aead: changed 'req->base.data'\n");
2051  		return -EINVAL;
2052  	}
2053  	if (is_test_sglist_corrupted(&tsgls->src)) {
2054  		pr_err("alg: aead: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
2055  		       driver, op, vec_name, cfg->name);
2056  		return -EINVAL;
2057  	}
2058  	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
2059  	    is_test_sglist_corrupted(&tsgls->dst)) {
2060  		pr_err("alg: aead: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
2061  		       driver, op, vec_name, cfg->name);
2062  		return -EINVAL;
2063  	}
2064  
2065  	/* Check for unexpected success or failure, or wrong error code */
2066  	if ((err == 0 && vec->novrfy) ||
2067  	    (err != vec->crypt_error && !(err == -EBADMSG && vec->novrfy))) {
2068  		char expected_error[32];
2069  
2070  		if (vec->novrfy &&
2071  		    vec->crypt_error != 0 && vec->crypt_error != -EBADMSG)
2072  			sprintf(expected_error, "-EBADMSG or %d",
2073  				vec->crypt_error);
2074  		else if (vec->novrfy)
2075  			sprintf(expected_error, "-EBADMSG");
2076  		else
2077  			sprintf(expected_error, "%d", vec->crypt_error);
2078  		if (err) {
2079  			pr_err("alg: aead: %s %s failed on test vector %s; expected_error=%s, actual_error=%d, cfg=\"%s\"\n",
2080  			       driver, op, vec_name, expected_error, err,
2081  			       cfg->name);
2082  			return err;
2083  		}
2084  		pr_err("alg: aead: %s %s unexpectedly succeeded on test vector %s; expected_error=%s, cfg=\"%s\"\n",
2085  		       driver, op, vec_name, expected_error, cfg->name);
2086  		return -EINVAL;
2087  	}
2088  	if (err) /* Expectedly failed. */
2089  		return 0;
2090  
2091  	/* Check for the correct output (ciphertext or plaintext) */
2092  	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
2093  				    enc ? vec->clen : vec->plen,
2094  				    vec->alen, enc || !cfg->inplace);
2095  	if (err == -EOVERFLOW) {
2096  		pr_err("alg: aead: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
2097  		       driver, op, vec_name, cfg->name);
2098  		return err;
2099  	}
2100  	if (err) {
2101  		pr_err("alg: aead: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
2102  		       driver, op, vec_name, cfg->name);
2103  		return err;
2104  	}
2105  
2106  	return 0;
2107  }
2108  
2109  static int test_aead_vec(const char *driver, int enc,
2110  			 const struct aead_testvec *vec, unsigned int vec_num,
2111  			 struct aead_request *req,
2112  			 struct cipher_test_sglists *tsgls)
2113  {
2114  	char vec_name[16];
2115  	unsigned int i;
2116  	int err;
2117  
2118  	if (enc && vec->novrfy)
2119  		return 0;
2120  
2121  	sprintf(vec_name, "%u", vec_num);
2122  
2123  	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
2124  		err = test_aead_vec_cfg(driver, enc, vec, vec_name,
2125  					&default_cipher_testvec_configs[i],
2126  					req, tsgls);
2127  		if (err)
2128  			return err;
2129  	}
2130  
2131  #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2132  	if (!noextratests) {
2133  		struct testvec_config cfg;
2134  		char cfgname[TESTVEC_CONFIG_NAMELEN];
2135  
2136  		for (i = 0; i < fuzz_iterations; i++) {
2137  			generate_random_testvec_config(&cfg, cfgname,
2138  						       sizeof(cfgname));
2139  			err = test_aead_vec_cfg(driver, enc, vec, vec_name,
2140  						&cfg, req, tsgls);
2141  			if (err)
2142  				return err;
2143  			cond_resched();
2144  		}
2145  	}
2146  #endif
2147  	return 0;
2148  }
2149  
2150  #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2151  
2152  struct aead_extra_tests_ctx {
2153  	struct aead_request *req;
2154  	struct crypto_aead *tfm;
2155  	const char *driver;
2156  	const struct alg_test_desc *test_desc;
2157  	struct cipher_test_sglists *tsgls;
2158  	unsigned int maxdatasize;
2159  	unsigned int maxkeysize;
2160  
2161  	struct aead_testvec vec;
2162  	char vec_name[64];
2163  	char cfgname[TESTVEC_CONFIG_NAMELEN];
2164  	struct testvec_config cfg;
2165  };
2166  
2167  /*
2168   * Make at least one random change to a (ciphertext, AAD) pair.  "Ciphertext"
2169   * here means the full ciphertext including the authentication tag.  The
2170   * authentication tag (and hence also the ciphertext) is assumed to be nonempty.
2171   */
2172  static void mutate_aead_message(struct aead_testvec *vec, bool aad_iv,
2173  				unsigned int ivsize)
2174  {
2175  	const unsigned int aad_tail_size = aad_iv ? ivsize : 0;
2176  	const unsigned int authsize = vec->clen - vec->plen;
2177  
2178  	if (prandom_u32() % 2 == 0 && vec->alen > aad_tail_size) {
2179  		/* Mutate the AAD */
2180  		flip_random_bit((u8 *)vec->assoc, vec->alen - aad_tail_size);
2181  		if (prandom_u32() % 2 == 0)
2182  			return;
2183  	}
2184  	if (prandom_u32() % 2 == 0) {
2185  		/* Mutate auth tag (assuming it's at the end of ciphertext) */
2186  		flip_random_bit((u8 *)vec->ctext + vec->plen, authsize);
2187  	} else {
2188  		/* Mutate any part of the ciphertext */
2189  		flip_random_bit((u8 *)vec->ctext, vec->clen);
2190  	}
2191  }
2192  
2193  /*
2194   * Minimum authentication tag size in bytes at which we assume that we can
2195   * reliably generate inauthentic messages, i.e. not generate an authentic
2196   * message by chance.
2197   */
2198  #define MIN_COLLISION_FREE_AUTHSIZE 8
2199  
2200  static void generate_aead_message(struct aead_request *req,
2201  				  const struct aead_test_suite *suite,
2202  				  struct aead_testvec *vec,
2203  				  bool prefer_inauthentic)
2204  {
2205  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2206  	const unsigned int ivsize = crypto_aead_ivsize(tfm);
2207  	const unsigned int authsize = vec->clen - vec->plen;
2208  	const bool inauthentic = (authsize >= MIN_COLLISION_FREE_AUTHSIZE) &&
2209  				 (prefer_inauthentic || prandom_u32() % 4 == 0);
2210  
2211  	/* Generate the AAD. */
2212  	generate_random_bytes((u8 *)vec->assoc, vec->alen);
2213  	if (suite->aad_iv && vec->alen >= ivsize)
2214  		/* Avoid implementation-defined behavior. */
2215  		memcpy((u8 *)vec->assoc + vec->alen - ivsize, vec->iv, ivsize);
2216  
2217  	if (inauthentic && prandom_u32() % 2 == 0) {
2218  		/* Generate a random ciphertext. */
2219  		generate_random_bytes((u8 *)vec->ctext, vec->clen);
2220  	} else {
2221  		int i = 0;
2222  		struct scatterlist src[2], dst;
2223  		u8 iv[MAX_IVLEN];
2224  		DECLARE_CRYPTO_WAIT(wait);
2225  
2226  		/* Generate a random plaintext and encrypt it. */
2227  		sg_init_table(src, 2);
2228  		if (vec->alen)
2229  			sg_set_buf(&src[i++], vec->assoc, vec->alen);
2230  		if (vec->plen) {
2231  			generate_random_bytes((u8 *)vec->ptext, vec->plen);
2232  			sg_set_buf(&src[i++], vec->ptext, vec->plen);
2233  		}
2234  		sg_init_one(&dst, vec->ctext, vec->alen + vec->clen);
2235  		memcpy(iv, vec->iv, ivsize);
2236  		aead_request_set_callback(req, 0, crypto_req_done, &wait);
2237  		aead_request_set_crypt(req, src, &dst, vec->plen, iv);
2238  		aead_request_set_ad(req, vec->alen);
2239  		vec->crypt_error = crypto_wait_req(crypto_aead_encrypt(req),
2240  						   &wait);
2241  		/* If encryption failed, we're done. */
2242  		if (vec->crypt_error != 0)
2243  			return;
2244  		memmove((u8 *)vec->ctext, vec->ctext + vec->alen, vec->clen);
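		/*
		 * The ciphertext and tag were written to the destination
		 * buffer at offset vec->alen (after the AAD area); move them
		 * down so the ciphertext starts at vec->ctext[0].
		 */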
2245  		if (!inauthentic)
2246  			return;
2247  		/*
2248  		 * Mutate the authentic (ciphertext, AAD) pair to get an
2249  		 * inauthentic one.
2250  		 */
2251  		mutate_aead_message(vec, suite->aad_iv, ivsize);
2252  	}
2253  	vec->novrfy = 1;
2254  	if (suite->einval_allowed)
2255  		vec->crypt_error = -EINVAL;
2256  }
2257  
2258  /*
2259   * Generate an AEAD test vector 'vec' using the implementation specified by
2260   * 'req'.  The buffers in 'vec' must already be allocated.
2261   *
2262   * If 'prefer_inauthentic' is true, then this function will generate inauthentic
2263   * test vectors (i.e. vectors with 'vec->novrfy=1') more often.
2264   */
2265  static void generate_random_aead_testvec(struct aead_request *req,
2266  					 struct aead_testvec *vec,
2267  					 const struct aead_test_suite *suite,
2268  					 unsigned int maxkeysize,
2269  					 unsigned int maxdatasize,
2270  					 char *name, size_t max_namelen,
2271  					 bool prefer_inauthentic)
2272  {
2273  	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2274  	const unsigned int ivsize = crypto_aead_ivsize(tfm);
2275  	const unsigned int maxauthsize = crypto_aead_maxauthsize(tfm);
2276  	unsigned int authsize;
2277  	unsigned int total_len;
2278  
2279  	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
2280  	vec->klen = maxkeysize;
2281  	if (prandom_u32() % 4 == 0)
2282  		vec->klen = prandom_u32() % (maxkeysize + 1);
2283  	generate_random_bytes((u8 *)vec->key, vec->klen);
2284  	vec->setkey_error = crypto_aead_setkey(tfm, vec->key, vec->klen);
2285  
2286  	/* IV */
2287  	generate_random_bytes((u8 *)vec->iv, ivsize);
2288  
2289  	/* Tag length: in [0, maxauthsize], but usually choose maxauthsize */
2290  	authsize = maxauthsize;
2291  	if (prandom_u32() % 4 == 0)
2292  		authsize = prandom_u32() % (maxauthsize + 1);
2293  	if (prefer_inauthentic && authsize < MIN_COLLISION_FREE_AUTHSIZE)
2294  		authsize = MIN_COLLISION_FREE_AUTHSIZE;
2295  	if (WARN_ON(authsize > maxdatasize))
2296  		authsize = maxdatasize;
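	/*
	 * Reserve room for the tag: alen + plen is chosen from the reduced
	 * maxdatasize, so alen + clen still fits in the ctext buffer.
	 */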
2297  	maxdatasize -= authsize;
2298  	vec->setauthsize_error = crypto_aead_setauthsize(tfm, authsize);
2299  
2300  	/* AAD, plaintext, and ciphertext lengths */
2301  	total_len = generate_random_length(maxdatasize);
2302  	if (prandom_u32() % 4 == 0)
2303  		vec->alen = 0;
2304  	else
2305  		vec->alen = generate_random_length(total_len);
2306  	vec->plen = total_len - vec->alen;
2307  	vec->clen = vec->plen + authsize;
2308  
2309  	/*
2310  	 * Generate the AAD, plaintext, and ciphertext.  Not applicable if the
2311  	 * key or the authentication tag size couldn't be set.
2312  	 */
2313  	vec->novrfy = 0;
2314  	vec->crypt_error = 0;
2315  	if (vec->setkey_error == 0 && vec->setauthsize_error == 0)
2316  		generate_aead_message(req, suite, vec, prefer_inauthentic);
2317  	snprintf(name, max_namelen,
2318  		 "\"random: alen=%u plen=%u authsize=%u klen=%u novrfy=%d\"",
2319  		 vec->alen, vec->plen, authsize, vec->klen, vec->novrfy);
2320  }
2321  
2322  static void try_to_generate_inauthentic_testvec(
2323  					struct aead_extra_tests_ctx *ctx)
2324  {
2325  	int i;
2326  
2327  	for (i = 0; i < 10; i++) {
2328  		generate_random_aead_testvec(ctx->req, &ctx->vec,
2329  					     &ctx->test_desc->suite.aead,
2330  					     ctx->maxkeysize, ctx->maxdatasize,
2331  					     ctx->vec_name,
2332  					     sizeof(ctx->vec_name), true);
2333  		if (ctx->vec.novrfy)
2334  			return;
2335  	}
2336  }
2337  
2338  /*
2339   * Generate inauthentic test vectors (i.e. ciphertext, AAD pairs that aren't the
2340   * result of an encryption with the key) and verify that decryption fails.
2341   */
2342  static int test_aead_inauthentic_inputs(struct aead_extra_tests_ctx *ctx)
2343  {
2344  	unsigned int i;
2345  	int err;
2346  
2347  	for (i = 0; i < fuzz_iterations * 8; i++) {
2348  		/*
2349  		 * Since this part of the tests isn't comparing the
2350  		 * implementation to another, there's no point in testing any
2351  		 * test vectors other than inauthentic ones (vec.novrfy=1) here.
2352  		 *
2353  		 * If we're having trouble generating such a test vector, e.g.
2354  		 * if the algorithm keeps rejecting the generated keys, don't
2355  		 * retry forever; just continue on.
2356  		 */
2357  		try_to_generate_inauthentic_testvec(ctx);
2358  		if (ctx->vec.novrfy) {
2359  			generate_random_testvec_config(&ctx->cfg, ctx->cfgname,
2360  						       sizeof(ctx->cfgname));
2361  			err = test_aead_vec_cfg(ctx->driver, DECRYPT, &ctx->vec,
2362  						ctx->vec_name, &ctx->cfg,
2363  						ctx->req, ctx->tsgls);
2364  			if (err)
2365  				return err;
2366  		}
2367  		cond_resched();
2368  	}
2369  	return 0;
2370  }
2371  
2372  /*
2373   * Test the AEAD algorithm against the corresponding generic implementation, if
2374   * one is available.
2375   */
2376  static int test_aead_vs_generic_impl(struct aead_extra_tests_ctx *ctx)
2377  {
2378  	struct crypto_aead *tfm = ctx->tfm;
2379  	const char *algname = crypto_aead_alg(tfm)->base.cra_name;
2380  	const char *driver = ctx->driver;
2381  	const char *generic_driver = ctx->test_desc->generic_driver;
2382  	char _generic_driver[CRYPTO_MAX_ALG_NAME];
2383  	struct crypto_aead *generic_tfm = NULL;
2384  	struct aead_request *generic_req = NULL;
2385  	unsigned int i;
2386  	int err;
2387  
2388  	if (!generic_driver) { /* Use default naming convention? */
2389  		err = build_generic_driver_name(algname, _generic_driver);
2390  		if (err)
2391  			return err;
2392  		generic_driver = _generic_driver;
2393  	}
2394  
2395  	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
2396  		return 0;
2397  
2398  	generic_tfm = crypto_alloc_aead(generic_driver, 0, 0);
2399  	if (IS_ERR(generic_tfm)) {
2400  		err = PTR_ERR(generic_tfm);
2401  		if (err == -ENOENT) {
2402  			pr_warn("alg: aead: skipping comparison tests for %s because %s is unavailable\n",
2403  				driver, generic_driver);
2404  			return 0;
2405  		}
2406  		pr_err("alg: aead: error allocating %s (generic impl of %s): %d\n",
2407  		       generic_driver, algname, err);
2408  		return err;
2409  	}
2410  
2411  	generic_req = aead_request_alloc(generic_tfm, GFP_KERNEL);
2412  	if (!generic_req) {
2413  		err = -ENOMEM;
2414  		goto out;
2415  	}
2416  
2417  	/* Check the algorithm properties for consistency. */
2418  
2419  	if (crypto_aead_maxauthsize(tfm) !=
2420  	    crypto_aead_maxauthsize(generic_tfm)) {
2421  		pr_err("alg: aead: maxauthsize for %s (%u) doesn't match generic impl (%u)\n",
2422  		       driver, crypto_aead_maxauthsize(tfm),
2423  		       crypto_aead_maxauthsize(generic_tfm));
2424  		err = -EINVAL;
2425  		goto out;
2426  	}
2427  
2428  	if (crypto_aead_ivsize(tfm) != crypto_aead_ivsize(generic_tfm)) {
2429  		pr_err("alg: aead: ivsize for %s (%u) doesn't match generic impl (%u)\n",
2430  		       driver, crypto_aead_ivsize(tfm),
2431  		       crypto_aead_ivsize(generic_tfm));
2432  		err = -EINVAL;
2433  		goto out;
2434  	}
2435  
2436  	if (crypto_aead_blocksize(tfm) != crypto_aead_blocksize(generic_tfm)) {
2437  		pr_err("alg: aead: blocksize for %s (%u) doesn't match generic impl (%u)\n",
2438  		       driver, crypto_aead_blocksize(tfm),
2439  		       crypto_aead_blocksize(generic_tfm));
2440  		err = -EINVAL;
2441  		goto out;
2442  	}
2443  
2444  	/*
2445  	 * Now generate test vectors using the generic implementation, and test
2446  	 * the other implementation against them.
2447  	 */
2448  	for (i = 0; i < fuzz_iterations * 8; i++) {
2449  		generate_random_aead_testvec(generic_req, &ctx->vec,
2450  					     &ctx->test_desc->suite.aead,
2451  					     ctx->maxkeysize, ctx->maxdatasize,
2452  					     ctx->vec_name,
2453  					     sizeof(ctx->vec_name), false);
2454  		generate_random_testvec_config(&ctx->cfg, ctx->cfgname,
2455  					       sizeof(ctx->cfgname));
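		/*
		 * Encryption is only tested with authentic vectors.
		 * Decryption is tested whenever the generic encryption
		 * succeeded or the vector is intentionally inauthentic.
		 */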
2456  		if (!ctx->vec.novrfy) {
2457  			err = test_aead_vec_cfg(driver, ENCRYPT, &ctx->vec,
2458  						ctx->vec_name, &ctx->cfg,
2459  						ctx->req, ctx->tsgls);
2460  			if (err)
2461  				goto out;
2462  		}
2463  		if (ctx->vec.crypt_error == 0 || ctx->vec.novrfy) {
2464  			err = test_aead_vec_cfg(driver, DECRYPT, &ctx->vec,
2465  						ctx->vec_name, &ctx->cfg,
2466  						ctx->req, ctx->tsgls);
2467  			if (err)
2468  				goto out;
2469  		}
2470  		cond_resched();
2471  	}
2472  	err = 0;
2473  out:
2474  	crypto_free_aead(generic_tfm);
2475  	aead_request_free(generic_req);
2476  	return err;
2477  }
2478  
2479  static int test_aead_extra(const char *driver,
2480  			   const struct alg_test_desc *test_desc,
2481  			   struct aead_request *req,
2482  			   struct cipher_test_sglists *tsgls)
2483  {
2484  	struct aead_extra_tests_ctx *ctx;
2485  	unsigned int i;
2486  	int err;
2487  
2488  	if (noextratests)
2489  		return 0;
2490  
2491  	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
2492  	if (!ctx)
2493  		return -ENOMEM;
2494  	ctx->req = req;
2495  	ctx->tfm = crypto_aead_reqtfm(req);
2496  	ctx->driver = driver;
2497  	ctx->test_desc = test_desc;
2498  	ctx->tsgls = tsgls;
2499  	ctx->maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
2500  	ctx->maxkeysize = 0;
2501  	for (i = 0; i < test_desc->suite.aead.count; i++)
2502  		ctx->maxkeysize = max_t(unsigned int, ctx->maxkeysize,
2503  					test_desc->suite.aead.vecs[i].klen);
2504  
2505  	ctx->vec.key = kmalloc(ctx->maxkeysize, GFP_KERNEL);
2506  	ctx->vec.iv = kmalloc(crypto_aead_ivsize(ctx->tfm), GFP_KERNEL);
2507  	ctx->vec.assoc = kmalloc(ctx->maxdatasize, GFP_KERNEL);
2508  	ctx->vec.ptext = kmalloc(ctx->maxdatasize, GFP_KERNEL);
2509  	ctx->vec.ctext = kmalloc(ctx->maxdatasize, GFP_KERNEL);
2510  	if (!ctx->vec.key || !ctx->vec.iv || !ctx->vec.assoc ||
2511  	    !ctx->vec.ptext || !ctx->vec.ctext) {
2512  		err = -ENOMEM;
2513  		goto out;
2514  	}
2515  
2516  	err = test_aead_vs_generic_impl(ctx);
2517  	if (err)
2518  		goto out;
2519  
2520  	err = test_aead_inauthentic_inputs(ctx);
2521  out:
2522  	kfree(ctx->vec.key);
2523  	kfree(ctx->vec.iv);
2524  	kfree(ctx->vec.assoc);
2525  	kfree(ctx->vec.ptext);
2526  	kfree(ctx->vec.ctext);
2527  	kfree(ctx);
2528  	return err;
2529  }
2530  #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
2531  static int test_aead_extra(const char *driver,
2532  			   const struct alg_test_desc *test_desc,
2533  			   struct aead_request *req,
2534  			   struct cipher_test_sglists *tsgls)
2535  {
2536  	return 0;
2537  }
2538  #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
2539  
2540  static int test_aead(const char *driver, int enc,
2541  		     const struct aead_test_suite *suite,
2542  		     struct aead_request *req,
2543  		     struct cipher_test_sglists *tsgls)
2544  {
2545  	unsigned int i;
2546  	int err;
2547  
2548  	for (i = 0; i < suite->count; i++) {
2549  		err = test_aead_vec(driver, enc, &suite->vecs[i], i, req,
2550  				    tsgls);
2551  		if (err)
2552  			return err;
2553  		cond_resched();
2554  	}
2555  	return 0;
2556  }
2557  
2558  static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
2559  			 u32 type, u32 mask)
2560  {
2561  	const struct aead_test_suite *suite = &desc->suite.aead;
2562  	struct crypto_aead *tfm;
2563  	struct aead_request *req = NULL;
2564  	struct cipher_test_sglists *tsgls = NULL;
2565  	int err;
2566  
2567  	if (suite->count <= 0) {
2568  		pr_err("alg: aead: empty test suite for %s\n", driver);
2569  		return -EINVAL;
2570  	}
2571  
2572  	tfm = crypto_alloc_aead(driver, type, mask);
2573  	if (IS_ERR(tfm)) {
2574  		pr_err("alg: aead: failed to allocate transform for %s: %ld\n",
2575  		       driver, PTR_ERR(tfm));
2576  		return PTR_ERR(tfm);
2577  	}
2578  
2579  	req = aead_request_alloc(tfm, GFP_KERNEL);
2580  	if (!req) {
2581  		pr_err("alg: aead: failed to allocate request for %s\n",
2582  		       driver);
2583  		err = -ENOMEM;
2584  		goto out;
2585  	}
2586  
2587  	tsgls = alloc_cipher_test_sglists();
2588  	if (!tsgls) {
2589  		pr_err("alg: aead: failed to allocate test buffers for %s\n",
2590  		       driver);
2591  		err = -ENOMEM;
2592  		goto out;
2593  	}
2594  
2595  	err = test_aead(driver, ENCRYPT, suite, req, tsgls);
2596  	if (err)
2597  		goto out;
2598  
2599  	err = test_aead(driver, DECRYPT, suite, req, tsgls);
2600  	if (err)
2601  		goto out;
2602  
2603  	err = test_aead_extra(driver, desc, req, tsgls);
2604  out:
2605  	free_cipher_test_sglists(tsgls);
2606  	aead_request_free(req);
2607  	crypto_free_aead(tfm);
2608  	return err;
2609  }
2610  
2611  static int test_cipher(struct crypto_cipher *tfm, int enc,
2612  		       const struct cipher_testvec *template,
2613  		       unsigned int tcount)
2614  {
2615  	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
2616  	unsigned int i, j, k;
2617  	char *q;
2618  	const char *e;
2619  	const char *input, *result;
2620  	void *data;
2621  	char *xbuf[XBUFSIZE];
2622  	int ret = -ENOMEM;
2623  
2624  	if (testmgr_alloc_buf(xbuf))
2625  		goto out_nobuf;
2626  
2627  	if (enc == ENCRYPT)
2628  		e = "encryption";
2629  	else
2630  		e = "decryption";
2631  
2632  	j = 0;
2633  	for (i = 0; i < tcount; i++) {
2634  
2635  		if (fips_enabled && template[i].fips_skip)
2636  			continue;
2637  
2638  		input  = enc ? template[i].ptext : template[i].ctext;
2639  		result = enc ? template[i].ctext : template[i].ptext;
2640  		j++;
2641  
2642  		ret = -EINVAL;
2643  		if (WARN_ON(template[i].len > PAGE_SIZE))
2644  			goto out;
2645  
2646  		data = xbuf[0];
2647  		memcpy(data, input, template[i].len);
2648  
2649  		crypto_cipher_clear_flags(tfm, ~0);
2650  		if (template[i].wk)
2651  			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
2652  
2653  		ret = crypto_cipher_setkey(tfm, template[i].key,
2654  					   template[i].klen);
2655  		if (ret) {
2656  			if (ret == template[i].setkey_error)
2657  				continue;
2658  			pr_err("alg: cipher: %s setkey failed on test vector %u; expected_error=%d, actual_error=%d, flags=%#x\n",
2659  			       algo, j, template[i].setkey_error, ret,
2660  			       crypto_cipher_get_flags(tfm));
2661  			goto out;
2662  		}
2663  		if (template[i].setkey_error) {
2664  			pr_err("alg: cipher: %s setkey unexpectedly succeeded on test vector %u; expected_error=%d\n",
2665  			       algo, j, template[i].setkey_error);
2666  			ret = -EINVAL;
2667  			goto out;
2668  		}
2669  
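		/*
		 * Single-block cipher API: process the vector one block at a
		 * time, in place.
		 */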
2670  		for (k = 0; k < template[i].len;
2671  		     k += crypto_cipher_blocksize(tfm)) {
2672  			if (enc)
2673  				crypto_cipher_encrypt_one(tfm, data + k,
2674  							  data + k);
2675  			else
2676  				crypto_cipher_decrypt_one(tfm, data + k,
2677  							  data + k);
2678  		}
2679  
2680  		q = data;
2681  		if (memcmp(q, result, template[i].len)) {
2682  			printk(KERN_ERR "alg: cipher: Test %d failed "
2683  			       "on %s for %s\n", j, e, algo);
2684  			hexdump(q, template[i].len);
2685  			ret = -EINVAL;
2686  			goto out;
2687  		}
2688  	}
2689  
2690  	ret = 0;
2691  
2692  out:
2693  	testmgr_free_buf(xbuf);
2694  out_nobuf:
2695  	return ret;
2696  }
2697  
2698  static int test_skcipher_vec_cfg(const char *driver, int enc,
2699  				 const struct cipher_testvec *vec,
2700  				 const char *vec_name,
2701  				 const struct testvec_config *cfg,
2702  				 struct skcipher_request *req,
2703  				 struct cipher_test_sglists *tsgls)
2704  {
2705  	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
2706  	const unsigned int alignmask = crypto_skcipher_alignmask(tfm);
2707  	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
2708  	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
2709  	const char *op = enc ? "encryption" : "decryption";
2710  	DECLARE_CRYPTO_WAIT(wait);
2711  	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
2712  	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
2713  		 cfg->iv_offset +
2714  		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
2715  	struct kvec input;
2716  	int err;
2717  
2718  	/* Set the key */
2719  	if (vec->wk)
2720  		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
2721  	else
2722  		crypto_skcipher_clear_flags(tfm,
2723  					    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
2724  	err = do_setkey(crypto_skcipher_setkey, tfm, vec->key, vec->klen,
2725  			cfg, alignmask);
2726  	if (err) {
2727  		if (err == vec->setkey_error)
2728  			return 0;
2729  		pr_err("alg: skcipher: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
2730  		       driver, vec_name, vec->setkey_error, err,
2731  		       crypto_skcipher_get_flags(tfm));
2732  		return err;
2733  	}
2734  	if (vec->setkey_error) {
2735  		pr_err("alg: skcipher: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
2736  		       driver, vec_name, vec->setkey_error);
2737  		return -EINVAL;
2738  	}
2739  
2740  	/* The IV must be copied to a buffer, as the algorithm may modify it */
2741  	if (ivsize) {
2742  		if (WARN_ON(ivsize > MAX_IVLEN))
2743  			return -EINVAL;
2744  		if (vec->generates_iv && !enc)
2745  			memcpy(iv, vec->iv_out, ivsize);
2746  		else if (vec->iv)
2747  			memcpy(iv, vec->iv, ivsize);
2748  		else
2749  			memset(iv, 0, ivsize);
2750  	} else {
2751  		if (vec->generates_iv) {
2752  			pr_err("alg: skcipher: %s has ivsize=0 but test vector %s generates IV!\n",
2753  			       driver, vec_name);
2754  			return -EINVAL;
2755  		}
2756  		iv = NULL;
2757  	}
2758  
2759  	/* Build the src/dst scatterlists */
2760  	input.iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
2761  	input.iov_len = vec->len;
2762  	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
2763  					vec->len, vec->len, &input, 1);
2764  	if (err) {
2765  		pr_err("alg: skcipher: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
2766  		       driver, op, vec_name, cfg->name);
2767  		return err;
2768  	}
2769  
2770  	/* Do the actual encryption or decryption */
2771  	testmgr_poison(req->__ctx, crypto_skcipher_reqsize(tfm));
2772  	skcipher_request_set_callback(req, req_flags, crypto_req_done, &wait);
2773  	skcipher_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
2774  				   vec->len, iv);
2775  	if (cfg->nosimd)
2776  		crypto_disable_simd_for_test();
2777  	err = enc ? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
2778  	if (cfg->nosimd)
2779  		crypto_reenable_simd_for_test();
2780  	err = crypto_wait_req(err, &wait);
2781  
2782  	/* Check that the algorithm didn't overwrite things it shouldn't have */
2783  	if (req->cryptlen != vec->len ||
2784  	    req->iv != iv ||
2785  	    req->src != tsgls->src.sgl_ptr ||
2786  	    req->dst != tsgls->dst.sgl_ptr ||
2787  	    crypto_skcipher_reqtfm(req) != tfm ||
2788  	    req->base.complete != crypto_req_done ||
2789  	    req->base.flags != req_flags ||
2790  	    req->base.data != &wait) {
2791  		pr_err("alg: skcipher: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
2792  		       driver, op, vec_name, cfg->name);
2793  		if (req->cryptlen != vec->len)
2794  			pr_err("alg: skcipher: changed 'req->cryptlen'\n");
2795  		if (req->iv != iv)
2796  			pr_err("alg: skcipher: changed 'req->iv'\n");
2797  		if (req->src != tsgls->src.sgl_ptr)
2798  			pr_err("alg: skcipher: changed 'req->src'\n");
2799  		if (req->dst != tsgls->dst.sgl_ptr)
2800  			pr_err("alg: skcipher: changed 'req->dst'\n");
2801  		if (crypto_skcipher_reqtfm(req) != tfm)
2802  			pr_err("alg: skcipher: changed 'req->base.tfm'\n");
2803  		if (req->base.complete != crypto_req_done)
2804  			pr_err("alg: skcipher: changed 'req->base.complete'\n");
2805  		if (req->base.flags != req_flags)
2806  			pr_err("alg: skcipher: changed 'req->base.flags'\n");
2807  		if (req->base.data != &wait)
2808  			pr_err("alg: skcipher: changed 'req->base.data'\n");
2809  		return -EINVAL;
2810  	}
2811  	if (is_test_sglist_corrupted(&tsgls->src)) {
2812  		pr_err("alg: skcipher: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
2813  		       driver, op, vec_name, cfg->name);
2814  		return -EINVAL;
2815  	}
2816  	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
2817  	    is_test_sglist_corrupted(&tsgls->dst)) {
2818  		pr_err("alg: skcipher: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
2819  		       driver, op, vec_name, cfg->name);
2820  		return -EINVAL;
2821  	}
2822  
2823  	/* Check for success or failure */
2824  	if (err) {
2825  		if (err == vec->crypt_error)
2826  			return 0;
2827  		pr_err("alg: skcipher: %s %s failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
2828  		       driver, op, vec_name, vec->crypt_error, err, cfg->name);
2829  		return err;
2830  	}
2831  	if (vec->crypt_error) {
2832  		pr_err("alg: skcipher: %s %s unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
2833  		       driver, op, vec_name, vec->crypt_error, cfg->name);
2834  		return -EINVAL;
2835  	}
2836  
2837  	/* Check for the correct output (ciphertext or plaintext) */
2838  	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
2839  				    vec->len, 0, true);
2840  	if (err == -EOVERFLOW) {
2841  		pr_err("alg: skcipher: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
2842  		       driver, op, vec_name, cfg->name);
2843  		return err;
2844  	}
2845  	if (err) {
2846  		pr_err("alg: skcipher: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
2847  		       driver, op, vec_name, cfg->name);
2848  		return err;
2849  	}
2850  
2851  	/* If applicable, check that the algorithm generated the correct IV */
2852  	if (vec->iv_out && memcmp(iv, vec->iv_out, ivsize) != 0) {
2853  		pr_err("alg: skcipher: %s %s test failed (wrong output IV) on test vector %s, cfg=\"%s\"\n",
2854  		       driver, op, vec_name, cfg->name);
2855  		hexdump(iv, ivsize);
2856  		return -EINVAL;
2857  	}
2858  
2859  	return 0;
2860  }
2861  
2862  static int test_skcipher_vec(const char *driver, int enc,
2863  			     const struct cipher_testvec *vec,
2864  			     unsigned int vec_num,
2865  			     struct skcipher_request *req,
2866  			     struct cipher_test_sglists *tsgls)
2867  {
2868  	char vec_name[16];
2869  	unsigned int i;
2870  	int err;
2871  
2872  	if (fips_enabled && vec->fips_skip)
2873  		return 0;
2874  
2875  	sprintf(vec_name, "%u", vec_num);
2876  
2877  	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
2878  		err = test_skcipher_vec_cfg(driver, enc, vec, vec_name,
2879  					    &default_cipher_testvec_configs[i],
2880  					    req, tsgls);
2881  		if (err)
2882  			return err;
2883  	}
2884  
2885  #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2886  	if (!noextratests) {
2887  		struct testvec_config cfg;
2888  		char cfgname[TESTVEC_CONFIG_NAMELEN];
2889  
2890  		for (i = 0; i < fuzz_iterations; i++) {
2891  			generate_random_testvec_config(&cfg, cfgname,
2892  						       sizeof(cfgname));
2893  			err = test_skcipher_vec_cfg(driver, enc, vec, vec_name,
2894  						    &cfg, req, tsgls);
2895  			if (err)
2896  				return err;
2897  			cond_resched();
2898  		}
2899  	}
2900  #endif
2901  	return 0;
2902  }
2903  
2904  #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2905  /*
2906   * Generate a symmetric cipher test vector from the given implementation.
2907   * Assumes the buffers in 'vec' were already allocated.
2908   */
2909  static void generate_random_cipher_testvec(struct skcipher_request *req,
2910  					   struct cipher_testvec *vec,
2911  					   unsigned int maxdatasize,
2912  					   char *name, size_t max_namelen)
2913  {
2914  	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
2915  	const unsigned int maxkeysize = crypto_skcipher_max_keysize(tfm);
2916  	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
2917  	struct scatterlist src, dst;
2918  	u8 iv[MAX_IVLEN];
2919  	DECLARE_CRYPTO_WAIT(wait);
2920  
2921  	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
2922  	vec->klen = maxkeysize;
2923  	if (prandom_u32() % 4 == 0)
2924  		vec->klen = prandom_u32() % (maxkeysize + 1);
2925  	generate_random_bytes((u8 *)vec->key, vec->klen);
2926  	vec->setkey_error = crypto_skcipher_setkey(tfm, vec->key, vec->klen);
2927  
2928  	/* IV */
2929  	generate_random_bytes((u8 *)vec->iv, ivsize);
2930  
2931  	/* Plaintext */
2932  	vec->len = generate_random_length(maxdatasize);
2933  	generate_random_bytes((u8 *)vec->ptext, vec->len);
2934  
2935  	/* If the key couldn't be set, no need to continue to encrypt. */
2936  	if (vec->setkey_error)
2937  		goto done;
2938  
2939  	/* Ciphertext */
2940  	sg_init_one(&src, vec->ptext, vec->len);
2941  	sg_init_one(&dst, vec->ctext, vec->len);
2942  	memcpy(iv, vec->iv, ivsize);
2943  	skcipher_request_set_callback(req, 0, crypto_req_done, &wait);
2944  	skcipher_request_set_crypt(req, &src, &dst, vec->len, iv);
2945  	vec->crypt_error = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
2946  	if (vec->crypt_error != 0) {
2947  		/*
2948  		 * The only acceptable error here is for an invalid length, so
2949  		 * skcipher decryption should fail with the same error too.
2950  		 * We'll test for this.  But to keep the API usage well-defined,
2951  		 * explicitly initialize the ciphertext buffer too.
2952  		 */
2953  		memset((u8 *)vec->ctext, 0, vec->len);
2954  	}
2955  done:
2956  	snprintf(name, max_namelen, "\"random: len=%u klen=%u\"",
2957  		 vec->len, vec->klen);
2958  }
2959  
2960  /*
2961   * Test the skcipher algorithm represented by @req against the corresponding
2962   * generic implementation, if one is available.
2963   */
2964  static int test_skcipher_vs_generic_impl(const char *driver,
2965  					 const char *generic_driver,
2966  					 struct skcipher_request *req,
2967  					 struct cipher_test_sglists *tsgls)
2968  {
2969  	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
2970  	const unsigned int maxkeysize = crypto_skcipher_max_keysize(tfm);
2971  	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
2972  	const unsigned int blocksize = crypto_skcipher_blocksize(tfm);
2973  	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
2974  	const char *algname = crypto_skcipher_alg(tfm)->base.cra_name;
2975  	char _generic_driver[CRYPTO_MAX_ALG_NAME];
2976  	struct crypto_skcipher *generic_tfm = NULL;
2977  	struct skcipher_request *generic_req = NULL;
2978  	unsigned int i;
2979  	struct cipher_testvec vec = { 0 };
2980  	char vec_name[64];
2981  	struct testvec_config *cfg;
2982  	char cfgname[TESTVEC_CONFIG_NAMELEN];
2983  	int err;
2984  
2985  	if (noextratests)
2986  		return 0;
2987  
2988  	/* Keywrap isn't supported here yet as it handles its IV differently. */
2989  	if (strncmp(algname, "kw(", 3) == 0)
2990  		return 0;
2991  
2992  	if (!generic_driver) { /* Use default naming convention? */
2993  		err = build_generic_driver_name(algname, _generic_driver);
2994  		if (err)
2995  			return err;
2996  		generic_driver = _generic_driver;
2997  	}
2998  
2999  	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
3000  		return 0;
3001  
3002  	generic_tfm = crypto_alloc_skcipher(generic_driver, 0, 0);
3003  	if (IS_ERR(generic_tfm)) {
3004  		err = PTR_ERR(generic_tfm);
3005  		if (err == -ENOENT) {
3006  			pr_warn("alg: skcipher: skipping comparison tests for %s because %s is unavailable\n",
3007  				driver, generic_driver);
3008  			return 0;
3009  		}
3010  		pr_err("alg: skcipher: error allocating %s (generic impl of %s): %d\n",
3011  		       generic_driver, algname, err);
3012  		return err;
3013  	}
3014  
3015  	cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
3016  	if (!cfg) {
3017  		err = -ENOMEM;
3018  		goto out;
3019  	}
3020  
3021  	generic_req = skcipher_request_alloc(generic_tfm, GFP_KERNEL);
3022  	if (!generic_req) {
3023  		err = -ENOMEM;
3024  		goto out;
3025  	}
3026  
3027  	/* Check the algorithm properties for consistency. */
3028  
3029  	if (crypto_skcipher_min_keysize(tfm) !=
3030  	    crypto_skcipher_min_keysize(generic_tfm)) {
3031  		pr_err("alg: skcipher: min keysize for %s (%u) doesn't match generic impl (%u)\n",
3032  		       driver, crypto_skcipher_min_keysize(tfm),
3033  		       crypto_skcipher_min_keysize(generic_tfm));
3034  		err = -EINVAL;
3035  		goto out;
3036  	}
3037  
3038  	if (maxkeysize != crypto_skcipher_max_keysize(generic_tfm)) {
3039  		pr_err("alg: skcipher: max keysize for %s (%u) doesn't match generic impl (%u)\n",
3040  		       driver, maxkeysize,
3041  		       crypto_skcipher_max_keysize(generic_tfm));
3042  		err = -EINVAL;
3043  		goto out;
3044  	}
3045  
3046  	if (ivsize != crypto_skcipher_ivsize(generic_tfm)) {
3047  		pr_err("alg: skcipher: ivsize for %s (%u) doesn't match generic impl (%u)\n",
3048  		       driver, ivsize, crypto_skcipher_ivsize(generic_tfm));
3049  		err = -EINVAL;
3050  		goto out;
3051  	}
3052  
3053  	if (blocksize != crypto_skcipher_blocksize(generic_tfm)) {
3054  		pr_err("alg: skcipher: blocksize for %s (%u) doesn't match generic impl (%u)\n",
3055  		       driver, blocksize,
3056  		       crypto_skcipher_blocksize(generic_tfm));
3057  		err = -EINVAL;
3058  		goto out;
3059  	}
3060  
3061  	/*
3062  	 * Now generate test vectors using the generic implementation, and test
3063  	 * the other implementation against them.
3064  	 */
3065  
3066  	vec.key = kmalloc(maxkeysize, GFP_KERNEL);
3067  	vec.iv = kmalloc(ivsize, GFP_KERNEL);
3068  	vec.ptext = kmalloc(maxdatasize, GFP_KERNEL);
3069  	vec.ctext = kmalloc(maxdatasize, GFP_KERNEL);
3070  	if (!vec.key || !vec.iv || !vec.ptext || !vec.ctext) {
3071  		err = -ENOMEM;
3072  		goto out;
3073  	}
3074  
3075  	for (i = 0; i < fuzz_iterations * 8; i++) {
3076  		generate_random_cipher_testvec(generic_req, &vec, maxdatasize,
3077  					       vec_name, sizeof(vec_name));
3078  		generate_random_testvec_config(cfg, cfgname, sizeof(cfgname));
3079  
3080  		err = test_skcipher_vec_cfg(driver, ENCRYPT, &vec, vec_name,
3081  					    cfg, req, tsgls);
3082  		if (err)
3083  			goto out;
3084  		err = test_skcipher_vec_cfg(driver, DECRYPT, &vec, vec_name,
3085  					    cfg, req, tsgls);
3086  		if (err)
3087  			goto out;
3088  		cond_resched();
3089  	}
3090  	err = 0;
3091  out:
3092  	kfree(cfg);
3093  	kfree(vec.key);
3094  	kfree(vec.iv);
3095  	kfree(vec.ptext);
3096  	kfree(vec.ctext);
3097  	crypto_free_skcipher(generic_tfm);
3098  	skcipher_request_free(generic_req);
3099  	return err;
3100  }
3101  #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
3102  static int test_skcipher_vs_generic_impl(const char *driver,
3103  					 const char *generic_driver,
3104  					 struct skcipher_request *req,
3105  					 struct cipher_test_sglists *tsgls)
3106  {
3107  	return 0;
3108  }
3109  #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
3110  
3111  static int test_skcipher(const char *driver, int enc,
3112  			 const struct cipher_test_suite *suite,
3113  			 struct skcipher_request *req,
3114  			 struct cipher_test_sglists *tsgls)
3115  {
3116  	unsigned int i;
3117  	int err;
3118  
3119  	for (i = 0; i < suite->count; i++) {
3120  		err = test_skcipher_vec(driver, enc, &suite->vecs[i], i, req,
3121  					tsgls);
3122  		if (err)
3123  			return err;
3124  		cond_resched();
3125  	}
3126  	return 0;
3127  }
3128  
3129  static int alg_test_skcipher(const struct alg_test_desc *desc,
3130  			     const char *driver, u32 type, u32 mask)
3131  {
3132  	const struct cipher_test_suite *suite = &desc->suite.cipher;
3133  	struct crypto_skcipher *tfm;
3134  	struct skcipher_request *req = NULL;
3135  	struct cipher_test_sglists *tsgls = NULL;
3136  	int err;
3137  
3138  	if (suite->count <= 0) {
3139  		pr_err("alg: skcipher: empty test suite for %s\n", driver);
3140  		return -EINVAL;
3141  	}
3142  
3143  	tfm = crypto_alloc_skcipher(driver, type, mask);
3144  	if (IS_ERR(tfm)) {
3145  		pr_err("alg: skcipher: failed to allocate transform for %s: %ld\n",
3146  		       driver, PTR_ERR(tfm));
3147  		return PTR_ERR(tfm);
3148  	}
3149  
3150  	req = skcipher_request_alloc(tfm, GFP_KERNEL);
3151  	if (!req) {
3152  		pr_err("alg: skcipher: failed to allocate request for %s\n",
3153  		       driver);
3154  		err = -ENOMEM;
3155  		goto out;
3156  	}
3157  
3158  	tsgls = alloc_cipher_test_sglists();
3159  	if (!tsgls) {
3160  		pr_err("alg: skcipher: failed to allocate test buffers for %s\n",
3161  		       driver);
3162  		err = -ENOMEM;
3163  		goto out;
3164  	}
3165  
3166  	err = test_skcipher(driver, ENCRYPT, suite, req, tsgls);
3167  	if (err)
3168  		goto out;
3169  
3170  	err = test_skcipher(driver, DECRYPT, suite, req, tsgls);
3171  	if (err)
3172  		goto out;
3173  
3174  	err = test_skcipher_vs_generic_impl(driver, desc->generic_driver, req,
3175  					    tsgls);
3176  out:
3177  	free_cipher_test_sglists(tsgls);
3178  	skcipher_request_free(req);
3179  	crypto_free_skcipher(tfm);
3180  	return err;
3181  }
3182  
3183  static int test_comp(struct crypto_comp *tfm,
3184  		     const struct comp_testvec *ctemplate,
3185  		     const struct comp_testvec *dtemplate,
3186  		     int ctcount, int dtcount)
3187  {
3188  	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
3189  	char *output, *decomp_output;
3190  	unsigned int i;
3191  	int ret;
3192  
3193  	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
3194  	if (!output)
3195  		return -ENOMEM;
3196  
3197  	decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
3198  	if (!decomp_output) {
3199  		kfree(output);
3200  		return -ENOMEM;
3201  	}
3202  
3203  	for (i = 0; i < ctcount; i++) {
3204  		int ilen;
3205  		unsigned int dlen = COMP_BUF_SIZE;
3206  
3207  		memset(output, 0, COMP_BUF_SIZE);
3208  		memset(decomp_output, 0, COMP_BUF_SIZE);
3209  
3210  		ilen = ctemplate[i].inlen;
3211  		ret = crypto_comp_compress(tfm, ctemplate[i].input,
3212  					   ilen, output, &dlen);
3213  		if (ret) {
3214  			printk(KERN_ERR "alg: comp: compression failed "
3215  			       "on test %d for %s: ret=%d\n", i + 1, algo,
3216  			       -ret);
3217  			goto out;
3218  		}
3219  
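		/*
		 * Feed the compressed output back through decompression and
		 * check that the round trip reproduces the original input.
		 */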
3220  		ilen = dlen;
3221  		dlen = COMP_BUF_SIZE;
3222  		ret = crypto_comp_decompress(tfm, output,
3223  					     ilen, decomp_output, &dlen);
3224  		if (ret) {
3225  			pr_err("alg: comp: decompression failed during compression test %d for %s: ret=%d\n",
3226  			       i + 1, algo, -ret);
3227  			goto out;
3228  		}
3229  
3230  		if (dlen != ctemplate[i].inlen) {
3231  			printk(KERN_ERR "alg: comp: Compression test %d "
3232  			       "failed for %s: output len = %d\n", i + 1, algo,
3233  			       dlen);
3234  			ret = -EINVAL;
3235  			goto out;
3236  		}
3237  
3238  		if (memcmp(decomp_output, ctemplate[i].input,
3239  			   ctemplate[i].inlen)) {
3240  			pr_err("alg: comp: compression test %d for %s failed: decompressed output doesn't match input\n",
3241  			       i + 1, algo);
3242  			hexdump(decomp_output, dlen);
3243  			ret = -EINVAL;
3244  			goto out;
3245  		}
3246  	}
3247  
3248  	for (i = 0; i < dtcount; i++) {
3249  		int ilen;
3250  		unsigned int dlen = COMP_BUF_SIZE;
3251  
3252  		memset(decomp_output, 0, COMP_BUF_SIZE);
3253  
3254  		ilen = dtemplate[i].inlen;
3255  		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
3256  					     ilen, decomp_output, &dlen);
3257  		if (ret) {
3258  			printk(KERN_ERR "alg: comp: decompression failed "
3259  			       "on test %d for %s: ret=%d\n", i + 1, algo,
3260  			       -ret);
3261  			goto out;
3262  		}
3263  
3264  		if (dlen != dtemplate[i].outlen) {
3265  			printk(KERN_ERR "alg: comp: Decompression test %d "
3266  			       "failed for %s: output len = %d\n", i + 1, algo,
3267  			       dlen);
3268  			ret = -EINVAL;
3269  			goto out;
3270  		}
3271  
3272  		if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
3273  			printk(KERN_ERR "alg: comp: Decompression test %d "
3274  			       "failed for %s\n", i + 1, algo);
3275  			hexdump(decomp_output, dlen);
3276  			ret = -EINVAL;
3277  			goto out;
3278  		}
3279  	}
3280  
3281  	ret = 0;
3282  
3283  out:
3284  	kfree(decomp_output);
3285  	kfree(output);
3286  	return ret;
3287  }
3288  
3289  static int test_acomp(struct crypto_acomp *tfm,
3290  		      const struct comp_testvec *ctemplate,
3291  		      const struct comp_testvec *dtemplate,
3292  		      int ctcount, int dtcount)
3293  {
3294  	const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
3295  	unsigned int i;
3296  	char *output, *decomp_out;
3297  	int ret;
3298  	struct scatterlist src, dst;
3299  	struct acomp_req *req;
3300  	struct crypto_wait wait;
3301  
3302  	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
3303  	if (!output)
3304  		return -ENOMEM;
3305  
3306  	decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
3307  	if (!decomp_out) {
3308  		kfree(output);
3309  		return -ENOMEM;
3310  	}
3311  
3312  	for (i = 0; i < ctcount; i++) {
3313  		unsigned int dlen = COMP_BUF_SIZE;
3314  		int ilen = ctemplate[i].inlen;
3315  		void *input_vec;
3316  
3317  		input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
3318  		if (!input_vec) {
3319  			ret = -ENOMEM;
3320  			goto out;
3321  		}
3322  
3323  		memset(output, 0, dlen);
3324  		crypto_init_wait(&wait);
3325  		sg_init_one(&src, input_vec, ilen);
3326  		sg_init_one(&dst, output, dlen);
3327  
3328  		req = acomp_request_alloc(tfm);
3329  		if (!req) {
3330  			pr_err("alg: acomp: request alloc failed for %s\n",
3331  			       algo);
3332  			kfree(input_vec);
3333  			ret = -ENOMEM;
3334  			goto out;
3335  		}
3336  
3337  		acomp_request_set_params(req, &src, &dst, ilen, dlen);
3338  		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
3339  					   crypto_req_done, &wait);
3340  
3341  		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
3342  		if (ret) {
3343  			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
3344  			       i + 1, algo, -ret);
3345  			kfree(input_vec);
3346  			acomp_request_free(req);
3347  			goto out;
3348  		}
3349  
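		/*
		 * req->dlen now holds the compressed length.  Reuse the same
		 * request to decompress that data into decomp_out so the
		 * round trip can be verified below.
		 */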
3350  		ilen = req->dlen;
3351  		dlen = COMP_BUF_SIZE;
3352  		sg_init_one(&src, output, ilen);
3353  		sg_init_one(&dst, decomp_out, dlen);
3354  		crypto_init_wait(&wait);
3355  		acomp_request_set_params(req, &src, &dst, ilen, dlen);
3356  
3357  		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
3358  		if (ret) {
3359  			pr_err("alg: acomp: decompression failed on compression test %d for %s: ret=%d\n",
3360  			       i + 1, algo, -ret);
3361  			kfree(input_vec);
3362  			acomp_request_free(req);
3363  			goto out;
3364  		}
3365  
3366  		if (req->dlen != ctemplate[i].inlen) {
3367  			pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
3368  			       i + 1, algo, req->dlen);
3369  			ret = -EINVAL;
3370  			kfree(input_vec);
3371  			acomp_request_free(req);
3372  			goto out;
3373  		}
3374  
3375  		if (memcmp(input_vec, decomp_out, req->dlen)) {
3376  			pr_err("alg: acomp: Compression test %d failed for %s\n",
3377  			       i + 1, algo);
3378  			hexdump(decomp_out, req->dlen);
3379  			ret = -EINVAL;
3380  			kfree(input_vec);
3381  			acomp_request_free(req);
3382  			goto out;
3383  		}
3384  
3385  		kfree(input_vec);
3386  		acomp_request_free(req);
3387  	}
3388  
3389  	for (i = 0; i < dtcount; i++) {
3390  		unsigned int dlen = COMP_BUF_SIZE;
3391  		int ilen = dtemplate[i].inlen;
3392  		void *input_vec;
3393  
3394  		input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
3395  		if (!input_vec) {
3396  			ret = -ENOMEM;
3397  			goto out;
3398  		}
3399  
3400  		memset(output, 0, dlen);
3401  		crypto_init_wait(&wait);
3402  		sg_init_one(&src, input_vec, ilen);
3403  		sg_init_one(&dst, output, dlen);
3404  
3405  		req = acomp_request_alloc(tfm);
3406  		if (!req) {
3407  			pr_err("alg: acomp: request alloc failed for %s\n",
3408  			       algo);
3409  			kfree(input_vec);
3410  			ret = -ENOMEM;
3411  			goto out;
3412  		}
3413  
3414  		acomp_request_set_params(req, &src, &dst, ilen, dlen);
3415  		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
3416  					   crypto_req_done, &wait);
3417  
3418  		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
3419  		if (ret) {
3420  			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
3421  			       i + 1, algo, -ret);
3422  			kfree(input_vec);
3423  			acomp_request_free(req);
3424  			goto out;
3425  		}
3426  
3427  		if (req->dlen != dtemplate[i].outlen) {
3428  			pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
3429  			       i + 1, algo, req->dlen);
3430  			ret = -EINVAL;
3431  			kfree(input_vec);
3432  			acomp_request_free(req);
3433  			goto out;
3434  		}
3435  
3436  		if (memcmp(output, dtemplate[i].output, req->dlen)) {
3437  			pr_err("alg: acomp: Decompression test %d failed for %s\n",
3438  			       i + 1, algo);
3439  			hexdump(output, req->dlen);
3440  			ret = -EINVAL;
3441  			kfree(input_vec);
3442  			acomp_request_free(req);
3443  			goto out;
3444  		}
3445  
3446  		kfree(input_vec);
3447  		acomp_request_free(req);
3448  	}
3449  
3450  	ret = 0;
3451  
3452  out:
3453  	kfree(decomp_out);
3454  	kfree(output);
3455  	return ret;
3456  }
3457  
3458  static int test_cprng(struct crypto_rng *tfm,
3459  		      const struct cprng_testvec *template,
3460  		      unsigned int tcount)
3461  {
3462  	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
3463  	int err = 0, i, j, seedsize;
3464  	u8 *seed;
3465  	char result[32];
3466  
3467  	seedsize = crypto_rng_seedsize(tfm);
3468  
3469  	seed = kmalloc(seedsize, GFP_KERNEL);
3470  	if (!seed) {
3471  		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
3472  		       "for %s\n", algo);
3473  		return -ENOMEM;
3474  	}
3475  
3476  	for (i = 0; i < tcount; i++) {
3477  		memset(result, 0, 32);
3478  
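		/*
		 * The seed buffer is the concatenation V || key || DT, which
		 * is the layout the ANSI X9.31 CPRNG driver parses on reset.
		 */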
3479  		memcpy(seed, template[i].v, template[i].vlen);
3480  		memcpy(seed + template[i].vlen, template[i].key,
3481  		       template[i].klen);
3482  		memcpy(seed + template[i].vlen + template[i].klen,
3483  		       template[i].dt, template[i].dtlen);
3484  
3485  		err = crypto_rng_reset(tfm, seed, seedsize);
3486  		if (err) {
3487  			printk(KERN_ERR "alg: cprng: Failed to reset rng "
3488  			       "for %s\n", algo);
3489  			goto out;
3490  		}
3491  
3492  		for (j = 0; j < template[i].loops; j++) {
3493  			err = crypto_rng_get_bytes(tfm, result,
3494  						   template[i].rlen);
3495  			if (err < 0) {
3496  				printk(KERN_ERR "alg: cprng: Failed to obtain "
3497  				       "the correct amount of random data for "
3498  				       "%s (requested %d)\n", algo,
3499  				       template[i].rlen);
3500  				goto out;
3501  			}
3502  		}
3503  
3504  		err = memcmp(result, template[i].result,
3505  			     template[i].rlen);
3506  		if (err) {
3507  			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
3508  			       i, algo);
3509  			hexdump(result, template[i].rlen);
3510  			err = -EINVAL;
3511  			goto out;
3512  		}
3513  	}
3514  
3515  out:
3516  	kfree(seed);
3517  	return err;
3518  }
3519  
3520  static int alg_test_cipher(const struct alg_test_desc *desc,
3521  			   const char *driver, u32 type, u32 mask)
3522  {
3523  	const struct cipher_test_suite *suite = &desc->suite.cipher;
3524  	struct crypto_cipher *tfm;
3525  	int err;
3526  
3527  	tfm = crypto_alloc_cipher(driver, type, mask);
3528  	if (IS_ERR(tfm)) {
3529  		printk(KERN_ERR "alg: cipher: Failed to load transform for "
3530  		       "%s: %ld\n", driver, PTR_ERR(tfm));
3531  		return PTR_ERR(tfm);
3532  	}
3533  
3534  	err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
3535  	if (!err)
3536  		err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);
3537  
3538  	crypto_free_cipher(tfm);
3539  	return err;
3540  }
3541  
3542  static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
3543  			 u32 type, u32 mask)
3544  {
3545  	struct crypto_comp *comp;
3546  	struct crypto_acomp *acomp;
3547  	int err;
3548  	u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
3549  
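	/*
	 * The same compression test vectors are run through either the acomp
	 * API or the legacy crypto_comp API, depending on which type of
	 * transform the algorithm registered.
	 */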
3550  	if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
3551  		acomp = crypto_alloc_acomp(driver, type, mask);
3552  		if (IS_ERR(acomp)) {
3553  			pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
3554  			       driver, PTR_ERR(acomp));
3555  			return PTR_ERR(acomp);
3556  		}
3557  		err = test_acomp(acomp, desc->suite.comp.comp.vecs,
3558  				 desc->suite.comp.decomp.vecs,
3559  				 desc->suite.comp.comp.count,
3560  				 desc->suite.comp.decomp.count);
3561  		crypto_free_acomp(acomp);
3562  	} else {
3563  		comp = crypto_alloc_comp(driver, type, mask);
3564  		if (IS_ERR(comp)) {
3565  			pr_err("alg: comp: Failed to load transform for %s: %ld\n",
3566  			       driver, PTR_ERR(comp));
3567  			return PTR_ERR(comp);
3568  		}
3569  
3570  		err = test_comp(comp, desc->suite.comp.comp.vecs,
3571  				desc->suite.comp.decomp.vecs,
3572  				desc->suite.comp.comp.count,
3573  				desc->suite.comp.decomp.count);
3574  
3575  		crypto_free_comp(comp);
3576  	}
3577  	return err;
3578  }
3579  
3580  static int alg_test_crc32c(const struct alg_test_desc *desc,
3581  			   const char *driver, u32 type, u32 mask)
3582  {
3583  	struct crypto_shash *tfm;
3584  	__le32 val;
3585  	int err;
3586  
3587  	err = alg_test_hash(desc, driver, type, mask);
3588  	if (err)
3589  		return err;
3590  
3591  	tfm = crypto_alloc_shash(driver, type, mask);
3592  	if (IS_ERR(tfm)) {
3593  		if (PTR_ERR(tfm) == -ENOENT) {
3594  			/*
3595  			 * This crc32c implementation is only available through
3596  			 * the ahash API, not the shash API, so the remaining part
3597  			 * of the test is not applicable to it.
3598  			 */
3599  			return 0;
3600  		}
3601  		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
3602  		       "%ld\n", driver, PTR_ERR(tfm));
3603  		return PTR_ERR(tfm);
3604  	}
3605  
3606  	do {
3607  		SHASH_DESC_ON_STACK(shash, tfm);
3608  		u32 *ctx = (u32 *)shash_desc_ctx(shash);
3609  
3610  		shash->tfm = tfm;
3611  
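		/*
		 * Seed the descriptor context with an arbitrary CRC state and
		 * finalize without feeding any data.  crc32c inverts the state
		 * on final, so a correct driver that honors the seeded context
		 * must return the bitwise complement of the seed.
		 */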
3612  		*ctx = 420553207;
3613  		err = crypto_shash_final(shash, (u8 *)&val);
3614  		if (err) {
3615  			printk(KERN_ERR "alg: crc32c: Operation failed for "
3616  			       "%s: %d\n", driver, err);
3617  			break;
3618  		}
3619  
3620  		if (val != cpu_to_le32(~420553207)) {
3621  			pr_err("alg: crc32c: Test failed for %s: %u\n",
3622  			       driver, le32_to_cpu(val));
3623  			err = -EINVAL;
3624  		}
3625  	} while (0);
3626  
3627  	crypto_free_shash(tfm);
3628  
3629  	return err;
3630  }
3631  
3632  static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
3633  			  u32 type, u32 mask)
3634  {
3635  	struct crypto_rng *rng;
3636  	int err;
3637  
3638  	rng = crypto_alloc_rng(driver, type, mask);
3639  	if (IS_ERR(rng)) {
3640  		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
3641  		       "%ld\n", driver, PTR_ERR(rng));
3642  		return PTR_ERR(rng);
3643  	}
3644  
3645  	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
3646  
3647  	crypto_free_rng(rng);
3648  
3649  	return err;
3650  }
3651  
3652  
3653  static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
3654  			  const char *driver, u32 type, u32 mask)
3655  {
3656  	int ret = -EAGAIN;
3657  	struct crypto_rng *drng;
3658  	struct drbg_test_data test_data;
3659  	struct drbg_string addtl, pers, testentropy;
3660  	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
3661  
3662  	if (!buf)
3663  		return -ENOMEM;
3664  
3665  	drng = crypto_alloc_rng(driver, type, mask);
3666  	if (IS_ERR(drng)) {
3667  		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
3668  		       "%s\n", driver);
3669  		kfree_sensitive(buf);
3670  		return -ENOMEM;
3671  	}
3672  
3673  	test_data.testentropy = &testentropy;
3674  	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
3675  	drbg_string_fill(&pers, test->pers, test->perslen);
3676  	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
3677  	if (ret) {
3678  		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
3679  		goto outbuf;
3680  	}
3681  
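	/*
	 * When prediction resistance is requested (pr), every generate call
	 * must be reseeded from fresh test entropy, so the *_test helper that
	 * takes drbg_test_data is used; otherwise the plain generate call
	 * with additional data is exercised.
	 */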
3682  	drbg_string_fill(&addtl, test->addtla, test->addtllen);
3683  	if (pr) {
3684  		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
3685  		ret = crypto_drbg_get_bytes_addtl_test(drng,
3686  			buf, test->expectedlen, &addtl,	&test_data);
3687  	} else {
3688  		ret = crypto_drbg_get_bytes_addtl(drng,
3689  			buf, test->expectedlen, &addtl);
3690  	}
3691  	if (ret < 0) {
3692  		printk(KERN_ERR "alg: drbg: could not obtain random data for "
3693  		       "driver %s\n", driver);
3694  		goto outbuf;
3695  	}
3696  
3697  	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
3698  	if (pr) {
3699  		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
3700  		ret = crypto_drbg_get_bytes_addtl_test(drng,
3701  			buf, test->expectedlen, &addtl, &test_data);
3702  	} else {
3703  		ret = crypto_drbg_get_bytes_addtl(drng,
3704  			buf, test->expectedlen, &addtl);
3705  	}
3706  	if (ret < 0) {
3707  		printk(KERN_ERR "alg: drbg: could not obtain random data for "
3708  		       "driver %s\n", driver);
3709  		goto outbuf;
3710  	}
3711  
3712  	ret = memcmp(test->expected, buf, test->expectedlen);
3713  
3714  outbuf:
3715  	crypto_free_rng(drng);
3716  	kfree_sensitive(buf);
3717  	return ret;
3718  }
3719  
3720  
3721  static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
3722  			 u32 type, u32 mask)
3723  {
3724  	int err = 0;
3725  	int pr = 0;
3726  	int i = 0;
3727  	const struct drbg_testvec *template = desc->suite.drbg.vecs;
3728  	unsigned int tcount = desc->suite.drbg.count;
3729  
3730  	if (memcmp(driver, "drbg_pr_", 8) == 0)
3731  		pr = 1;
3732  
3733  	for (i = 0; i < tcount; i++) {
3734  		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
3735  		if (err) {
3736  			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
3737  			       i, driver);
3738  			err = -EINVAL;
3739  			break;
3740  		}
3741  	}
3742  	return err;
3743  
3744  }
3745  
3746  static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
3747  		       const char *alg)
3748  {
3749  	struct kpp_request *req;
3750  	void *input_buf = NULL;
3751  	void *output_buf = NULL;
3752  	void *a_public = NULL;
3753  	void *a_ss = NULL;
3754  	void *shared_secret = NULL;
3755  	struct crypto_wait wait;
3756  	unsigned int out_len_max;
3757  	int err = -ENOMEM;
3758  	struct scatterlist src, dst;
3759  
3760  	req = kpp_request_alloc(tfm, GFP_KERNEL);
3761  	if (!req)
3762  		return err;
3763  
3764  	crypto_init_wait(&wait);
3765  
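	/*
	 * The test acts as party A: set A's secret, generate A's public key,
	 * then compute the shared secret from B's public key.  For vectors
	 * with on-the-fly key generation (vec->genkey) the roles are then
	 * reversed and both sides must arrive at the same shared secret;
	 * otherwise the results are compared against the expected values in
	 * the vector.
	 */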
3766  	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
3767  	if (err < 0)
3768  		goto free_req;
3769  
3770  	out_len_max = crypto_kpp_maxsize(tfm);
3771  	output_buf = kzalloc(out_len_max, GFP_KERNEL);
3772  	if (!output_buf) {
3773  		err = -ENOMEM;
3774  		goto free_req;
3775  	}
3776  
3777  	/* Generate the public key: no src needed, only the secret set above */
3778  	kpp_request_set_input(req, NULL, 0);
3779  	sg_init_one(&dst, output_buf, out_len_max);
3780  	kpp_request_set_output(req, &dst, out_len_max);
3781  	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
3782  				 crypto_req_done, &wait);
3783  
3784  	/* Compute party A's public key */
3785  	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
3786  	if (err) {
3787  		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
3788  		       alg, err);
3789  		goto free_output;
3790  	}
3791  
3792  	if (vec->genkey) {
3793  		/* Save party A's public key */
3794  		a_public = kmemdup(sg_virt(req->dst), out_len_max, GFP_KERNEL);
3795  		if (!a_public) {
3796  			err = -ENOMEM;
3797  			goto free_output;
3798  		}
3799  	} else {
3800  		/* Verify calculated public key */
3801  		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
3802  			   vec->expected_a_public_size)) {
3803  			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
3804  			       alg);
3805  			err = -EINVAL;
3806  			goto free_output;
3807  		}
3808  	}
3809  
3810  	/* Calculate the shared secret using the counterpart's (B) public key. */
3811  	input_buf = kmemdup(vec->b_public, vec->b_public_size, GFP_KERNEL);
3812  	if (!input_buf) {
3813  		err = -ENOMEM;
3814  		goto free_output;
3815  	}
3816  
3817  	sg_init_one(&src, input_buf, vec->b_public_size);
3818  	sg_init_one(&dst, output_buf, out_len_max);
3819  	kpp_request_set_input(req, &src, vec->b_public_size);
3820  	kpp_request_set_output(req, &dst, out_len_max);
3821  	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
3822  				 crypto_req_done, &wait);
3823  	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
3824  	if (err) {
3825  		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
3826  		       alg, err);
3827  		goto free_all;
3828  	}
3829  
3830  	if (vec->genkey) {
3831  		/* Save the shared secret obtained by party A */
3832  		a_ss = kmemdup(sg_virt(req->dst), vec->expected_ss_size, GFP_KERNEL);
3833  		if (!a_ss) {
3834  			err = -ENOMEM;
3835  			goto free_all;
3836  		}
3837  
3838  		/*
3839  		 * Calculate party B's shared secret by using party A's
3840  		 * public key.
3841  		 */
3842  		err = crypto_kpp_set_secret(tfm, vec->b_secret,
3843  					    vec->b_secret_size);
3844  		if (err < 0)
3845  			goto free_all;
3846  
3847  		sg_init_one(&src, a_public, vec->expected_a_public_size);
3848  		sg_init_one(&dst, output_buf, out_len_max);
3849  		kpp_request_set_input(req, &src, vec->expected_a_public_size);
3850  		kpp_request_set_output(req, &dst, out_len_max);
3851  		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
3852  					 crypto_req_done, &wait);
3853  		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
3854  				      &wait);
3855  		if (err) {
3856  			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
3857  			       alg, err);
3858  			goto free_all;
3859  		}
3860  
3861  		shared_secret = a_ss;
3862  	} else {
3863  		shared_secret = (void *)vec->expected_ss;
3864  	}
3865  
3866  	/*
3867  	 * Verify the shared secret, from which the user would derive a
3868  	 * secret key by running it through whatever hash/KDF they have chosen.
3869  	 */
3870  	if (memcmp(shared_secret, sg_virt(req->dst),
3871  		   vec->expected_ss_size)) {
3872  		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
3873  		       alg);
3874  		err = -EINVAL;
3875  	}
3876  
3877  free_all:
3878  	kfree(a_ss);
3879  	kfree(input_buf);
3880  free_output:
3881  	kfree(a_public);
3882  	kfree(output_buf);
3883  free_req:
3884  	kpp_request_free(req);
3885  	return err;
3886  }
3887  
3888  static int test_kpp(struct crypto_kpp *tfm, const char *alg,
3889  		    const struct kpp_testvec *vecs, unsigned int tcount)
3890  {
3891  	int ret, i;
3892  
3893  	for (i = 0; i < tcount; i++) {
3894  		ret = do_test_kpp(tfm, vecs++, alg);
3895  		if (ret) {
3896  			pr_err("alg: %s: test failed on vector %d, err=%d\n",
3897  			       alg, i + 1, ret);
3898  			return ret;
3899  		}
3900  	}
3901  	return 0;
3902  }
3903  
3904  static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
3905  			u32 type, u32 mask)
3906  {
3907  	struct crypto_kpp *tfm;
3908  	int err = 0;
3909  
3910  	tfm = crypto_alloc_kpp(driver, type, mask);
3911  	if (IS_ERR(tfm)) {
3912  		pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
3913  		       driver, PTR_ERR(tfm));
3914  		return PTR_ERR(tfm);
3915  	}
3916  	if (desc->suite.kpp.vecs)
3917  		err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
3918  			       desc->suite.kpp.count);
3919  
3920  	crypto_free_kpp(tfm);
3921  	return err;
3922  }
3923  
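/* Append a native-endian u32 to the buffer and return the advanced pointer. */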
3924  static u8 *test_pack_u32(u8 *dst, u32 val)
3925  {
3926  	memcpy(dst, &val, sizeof(val));
3927  	return dst + sizeof(val);
3928  }
3929  
3930  static int test_akcipher_one(struct crypto_akcipher *tfm,
3931  			     const struct akcipher_testvec *vecs)
3932  {
3933  	char *xbuf[XBUFSIZE];
3934  	struct akcipher_request *req;
3935  	void *outbuf_enc = NULL;
3936  	void *outbuf_dec = NULL;
3937  	struct crypto_wait wait;
3938  	unsigned int out_len_max, out_len = 0;
3939  	int err = -ENOMEM;
3940  	struct scatterlist src, dst, src_tab[3];
3941  	const char *m, *c;
3942  	unsigned int m_size, c_size;
3943  	const char *op;
3944  	u8 *key, *ptr;
3945  
3946  	if (testmgr_alloc_buf(xbuf))
3947  		return err;
3948  
3949  	req = akcipher_request_alloc(tfm, GFP_KERNEL);
3950  	if (!req)
3951  		goto free_xbuf;
3952  
3953  	crypto_init_wait(&wait);
3954  
3955  	key = kmalloc(vecs->key_len + sizeof(u32) * 2 + vecs->param_len,
3956  		      GFP_KERNEL);
3957  	if (!key)
3958  		goto free_req;
3959  	memcpy(key, vecs->key, vecs->key_len);
3960  	ptr = key + vecs->key_len;
3961  	ptr = test_pack_u32(ptr, vecs->algo);
3962  	ptr = test_pack_u32(ptr, vecs->param_len);
3963  	memcpy(ptr, vecs->params, vecs->param_len);
3964  
3965  	if (vecs->public_key_vec)
3966  		err = crypto_akcipher_set_pub_key(tfm, key, vecs->key_len);
3967  	else
3968  		err = crypto_akcipher_set_priv_key(tfm, key, vecs->key_len);
3969  	if (err)
3970  		goto free_key;
3971  
3972  	/*
3973  	 * First run the tests which do not require a private key, such as
3974  	 * encrypt or verify.
3975  	 */
3976  	err = -ENOMEM;
3977  	out_len_max = crypto_akcipher_maxsize(tfm);
3978  	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
3979  	if (!outbuf_enc)
3980  		goto free_key;
3981  
3982  	if (!vecs->siggen_sigver_test) {
3983  		m = vecs->m;
3984  		m_size = vecs->m_size;
3985  		c = vecs->c;
3986  		c_size = vecs->c_size;
3987  		op = "encrypt";
3988  	} else {
3989  		/* Swap the args so we can keep the plaintext (digest)
3990  		 * in vecs->m, and cooked signature in vecs->c.
3991  		 */
3992  		m = vecs->c; /* signature */
3993  		m_size = vecs->c_size;
3994  		c = vecs->m; /* digest */
3995  		c_size = vecs->m_size;
3996  		op = "verify";
3997  	}
3998  
3999  	err = -E2BIG;
4000  	if (WARN_ON(m_size > PAGE_SIZE))
4001  		goto free_all;
4002  	memcpy(xbuf[0], m, m_size);
4003  
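	/*
	 * The message is split across two scatterlist entries (an 8-byte head
	 * plus the remainder), which exercises multi-entry source
	 * scatterlists; for signature verification a third entry carries the
	 * digest that the signature is checked against.
	 */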
4004  	sg_init_table(src_tab, 3);
4005  	sg_set_buf(&src_tab[0], xbuf[0], 8);
4006  	sg_set_buf(&src_tab[1], xbuf[0] + 8, m_size - 8);
4007  	if (vecs->siggen_sigver_test) {
4008  		if (WARN_ON(c_size > PAGE_SIZE))
4009  			goto free_all;
4010  		memcpy(xbuf[1], c, c_size);
4011  		sg_set_buf(&src_tab[2], xbuf[1], c_size);
4012  		akcipher_request_set_crypt(req, src_tab, NULL, m_size, c_size);
4013  	} else {
4014  		sg_init_one(&dst, outbuf_enc, out_len_max);
4015  		akcipher_request_set_crypt(req, src_tab, &dst, m_size,
4016  					   out_len_max);
4017  	}
4018  	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
4019  				      crypto_req_done, &wait);
4020  
4021  	err = crypto_wait_req(vecs->siggen_sigver_test ?
4022  			      /* Run asymmetric signature verification */
4023  			      crypto_akcipher_verify(req) :
4024  			      /* Run asymmetric encrypt */
4025  			      crypto_akcipher_encrypt(req), &wait);
4026  	if (err) {
4027  		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
4028  		goto free_all;
4029  	}
4030  	if (!vecs->siggen_sigver_test && c) {
4031  		if (req->dst_len != c_size) {
4032  			pr_err("alg: akcipher: %s test failed. Invalid output len\n",
4033  			       op);
4034  			err = -EINVAL;
4035  			goto free_all;
4036  		}
4037  		/* verify that the encrypted message matches the expected ciphertext */
4038  		if (memcmp(c, outbuf_enc, c_size) != 0) {
4039  			pr_err("alg: akcipher: %s test failed. Invalid output\n",
4040  			       op);
4041  			hexdump(outbuf_enc, c_size);
4042  			err = -EINVAL;
4043  			goto free_all;
4044  		}
4045  	}
4046  
4047  	/*
4048  	 * Don't invoke the (decrypt or sign) tests, which require a private
4049  	 * key, for vectors that only have a public key.
4050  	 */
4051  	if (vecs->public_key_vec) {
4052  		err = 0;
4053  		goto free_all;
4054  	}
4055  	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
4056  	if (!outbuf_dec) {
4057  		err = -ENOMEM;
4058  		goto free_all;
4059  	}
4060  
4061  	if (!vecs->siggen_sigver_test && !c) {
4062  		c = outbuf_enc;
4063  		c_size = req->dst_len;
4064  	}
4065  
4066  	err = -E2BIG;
4067  	op = vecs->siggen_sigver_test ? "sign" : "decrypt";
4068  	if (WARN_ON(c_size > PAGE_SIZE))
4069  		goto free_all;
4070  	memcpy(xbuf[0], c, c_size);
4071  
4072  	sg_init_one(&src, xbuf[0], c_size);
4073  	sg_init_one(&dst, outbuf_dec, out_len_max);
4074  	crypto_init_wait(&wait);
4075  	akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max);
4076  
4077  	err = crypto_wait_req(vecs->siggen_sigver_test ?
4078  			      /* Run asymmetric signature generation */
4079  			      crypto_akcipher_sign(req) :
4080  			      /* Run asymmetric decrypt */
4081  			      crypto_akcipher_decrypt(req), &wait);
4082  	if (err) {
4083  		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
4084  		goto free_all;
4085  	}
4086  	out_len = req->dst_len;
4087  	if (out_len < m_size) {
4088  		pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
4089  		       op, out_len);
4090  		err = -EINVAL;
4091  		goto free_all;
4092  	}
4093  	/* verify that the decrypted message is equal to the original message */
4094  	if (memchr_inv(outbuf_dec, 0, out_len - m_size) ||
4095  	    memcmp(m, outbuf_dec + out_len - m_size, m_size)) {
4096  		pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
4097  		hexdump(outbuf_dec, out_len);
4098  		err = -EINVAL;
4099  	}
4100  free_all:
4101  	kfree(outbuf_dec);
4102  	kfree(outbuf_enc);
4103  free_key:
4104  	kfree(key);
4105  free_req:
4106  	akcipher_request_free(req);
4107  free_xbuf:
4108  	testmgr_free_buf(xbuf);
4109  	return err;
4110  }
4111  
4112  static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
4113  			 const struct akcipher_testvec *vecs,
4114  			 unsigned int tcount)
4115  {
4116  	const char *algo =
4117  		crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
4118  	int ret, i;
4119  
4120  	for (i = 0; i < tcount; i++) {
4121  		ret = test_akcipher_one(tfm, vecs++);
4122  		if (!ret)
4123  			continue;
4124  
4125  		pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
4126  		       i + 1, algo, ret);
4127  		return ret;
4128  	}
4129  	return 0;
4130  }
4131  
4132  static int alg_test_akcipher(const struct alg_test_desc *desc,
4133  			     const char *driver, u32 type, u32 mask)
4134  {
4135  	struct crypto_akcipher *tfm;
4136  	int err = 0;
4137  
4138  	tfm = crypto_alloc_akcipher(driver, type, mask);
4139  	if (IS_ERR(tfm)) {
4140  		pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
4141  		       driver, PTR_ERR(tfm));
4142  		return PTR_ERR(tfm);
4143  	}
4144  	if (desc->suite.akcipher.vecs)
4145  		err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
4146  				    desc->suite.akcipher.count);
4147  
4148  	crypto_free_akcipher(tfm);
4149  	return err;
4150  }
4151  
4152  static int alg_test_null(const struct alg_test_desc *desc,
4153  			     const char *driver, u32 type, u32 mask)
4154  {
4155  	return 0;
4156  }
4157  
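/*
 * Helpers for declaring test suites: ____VECS() expands to the .vecs and
 * .count initializers for a test vector array, and __VECS() wraps them in
 * braces for the common case where no extra suite flags are needed.
 */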
4158  #define ____VECS(tv)	.vecs = tv, .count = ARRAY_SIZE(tv)
4159  #define __VECS(tv)	{ ____VECS(tv) }
4160  
4161  /* Please keep this list sorted by algorithm name. */
4162  static const struct alg_test_desc alg_test_descs[] = {
4163  	{
4164  		.alg = "adiantum(xchacha12,aes)",
4165  		.generic_driver = "adiantum(xchacha12-generic,aes-generic,nhpoly1305-generic)",
4166  		.test = alg_test_skcipher,
4167  		.suite = {
4168  			.cipher = __VECS(adiantum_xchacha12_aes_tv_template)
4169  		},
4170  	}, {
4171  		.alg = "adiantum(xchacha20,aes)",
4172  		.generic_driver = "adiantum(xchacha20-generic,aes-generic,nhpoly1305-generic)",
4173  		.test = alg_test_skcipher,
4174  		.suite = {
4175  			.cipher = __VECS(adiantum_xchacha20_aes_tv_template)
4176  		},
4177  	}, {
4178  		.alg = "aegis128",
4179  		.test = alg_test_aead,
4180  		.suite = {
4181  			.aead = __VECS(aegis128_tv_template)
4182  		}
4183  	}, {
4184  		.alg = "ansi_cprng",
4185  		.test = alg_test_cprng,
4186  		.suite = {
4187  			.cprng = __VECS(ansi_cprng_aes_tv_template)
4188  		}
4189  	}, {
4190  		.alg = "authenc(hmac(md5),ecb(cipher_null))",
4191  		.test = alg_test_aead,
4192  		.suite = {
4193  			.aead = __VECS(hmac_md5_ecb_cipher_null_tv_template)
4194  		}
4195  	}, {
4196  		.alg = "authenc(hmac(sha1),cbc(aes))",
4197  		.test = alg_test_aead,
4198  		.fips_allowed = 1,
4199  		.suite = {
4200  			.aead = __VECS(hmac_sha1_aes_cbc_tv_temp)
4201  		}
4202  	}, {
4203  		.alg = "authenc(hmac(sha1),cbc(des))",
4204  		.test = alg_test_aead,
4205  		.suite = {
4206  			.aead = __VECS(hmac_sha1_des_cbc_tv_temp)
4207  		}
4208  	}, {
4209  		.alg = "authenc(hmac(sha1),cbc(des3_ede))",
4210  		.test = alg_test_aead,
4211  		.fips_allowed = 1,
4212  		.suite = {
4213  			.aead = __VECS(hmac_sha1_des3_ede_cbc_tv_temp)
4214  		}
4215  	}, {
4216  		.alg = "authenc(hmac(sha1),ctr(aes))",
4217  		.test = alg_test_null,
4218  		.fips_allowed = 1,
4219  	}, {
4220  		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
4221  		.test = alg_test_aead,
4222  		.suite = {
4223  			.aead = __VECS(hmac_sha1_ecb_cipher_null_tv_temp)
4224  		}
4225  	}, {
4226  		.alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
4227  		.test = alg_test_null,
4228  		.fips_allowed = 1,
4229  	}, {
4230  		.alg = "authenc(hmac(sha224),cbc(des))",
4231  		.test = alg_test_aead,
4232  		.suite = {
4233  			.aead = __VECS(hmac_sha224_des_cbc_tv_temp)
4234  		}
4235  	}, {
4236  		.alg = "authenc(hmac(sha224),cbc(des3_ede))",
4237  		.test = alg_test_aead,
4238  		.fips_allowed = 1,
4239  		.suite = {
4240  			.aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp)
4241  		}
4242  	}, {
4243  		.alg = "authenc(hmac(sha256),cbc(aes))",
4244  		.test = alg_test_aead,
4245  		.fips_allowed = 1,
4246  		.suite = {
4247  			.aead = __VECS(hmac_sha256_aes_cbc_tv_temp)
4248  		}
4249  	}, {
4250  		.alg = "authenc(hmac(sha256),cbc(des))",
4251  		.test = alg_test_aead,
4252  		.suite = {
4253  			.aead = __VECS(hmac_sha256_des_cbc_tv_temp)
4254  		}
4255  	}, {
4256  		.alg = "authenc(hmac(sha256),cbc(des3_ede))",
4257  		.test = alg_test_aead,
4258  		.fips_allowed = 1,
4259  		.suite = {
4260  			.aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp)
4261  		}
4262  	}, {
4263  		.alg = "authenc(hmac(sha256),ctr(aes))",
4264  		.test = alg_test_null,
4265  		.fips_allowed = 1,
4266  	}, {
4267  		.alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
4268  		.test = alg_test_null,
4269  		.fips_allowed = 1,
4270  	}, {
4271  		.alg = "authenc(hmac(sha384),cbc(des))",
4272  		.test = alg_test_aead,
4273  		.suite = {
4274  			.aead = __VECS(hmac_sha384_des_cbc_tv_temp)
4275  		}
4276  	}, {
4277  		.alg = "authenc(hmac(sha384),cbc(des3_ede))",
4278  		.test = alg_test_aead,
4279  		.fips_allowed = 1,
4280  		.suite = {
4281  			.aead = __VECS(hmac_sha384_des3_ede_cbc_tv_temp)
4282  		}
4283  	}, {
4284  		.alg = "authenc(hmac(sha384),ctr(aes))",
4285  		.test = alg_test_null,
4286  		.fips_allowed = 1,
4287  	}, {
4288  		.alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
4289  		.test = alg_test_null,
4290  		.fips_allowed = 1,
4291  	}, {
4292  		.alg = "authenc(hmac(sha512),cbc(aes))",
4293  		.fips_allowed = 1,
4294  		.test = alg_test_aead,
4295  		.suite = {
4296  			.aead = __VECS(hmac_sha512_aes_cbc_tv_temp)
4297  		}
4298  	}, {
4299  		.alg = "authenc(hmac(sha512),cbc(des))",
4300  		.test = alg_test_aead,
4301  		.suite = {
4302  			.aead = __VECS(hmac_sha512_des_cbc_tv_temp)
4303  		}
4304  	}, {
4305  		.alg = "authenc(hmac(sha512),cbc(des3_ede))",
4306  		.test = alg_test_aead,
4307  		.fips_allowed = 1,
4308  		.suite = {
4309  			.aead = __VECS(hmac_sha512_des3_ede_cbc_tv_temp)
4310  		}
4311  	}, {
4312  		.alg = "authenc(hmac(sha512),ctr(aes))",
4313  		.test = alg_test_null,
4314  		.fips_allowed = 1,
4315  	}, {
4316  		.alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
4317  		.test = alg_test_null,
4318  		.fips_allowed = 1,
4319  	}, {
4320  		.alg = "blake2b-160",
4321  		.test = alg_test_hash,
4322  		.fips_allowed = 0,
4323  		.suite = {
4324  			.hash = __VECS(blake2b_160_tv_template)
4325  		}
4326  	}, {
4327  		.alg = "blake2b-256",
4328  		.test = alg_test_hash,
4329  		.fips_allowed = 0,
4330  		.suite = {
4331  			.hash = __VECS(blake2b_256_tv_template)
4332  		}
4333  	}, {
4334  		.alg = "blake2b-384",
4335  		.test = alg_test_hash,
4336  		.fips_allowed = 0,
4337  		.suite = {
4338  			.hash = __VECS(blake2b_384_tv_template)
4339  		}
4340  	}, {
4341  		.alg = "blake2b-512",
4342  		.test = alg_test_hash,
4343  		.fips_allowed = 0,
4344  		.suite = {
4345  			.hash = __VECS(blake2b_512_tv_template)
4346  		}
4347  	}, {
4348  		.alg = "blake2s-128",
4349  		.test = alg_test_hash,
4350  		.suite = {
4351  			.hash = __VECS(blakes2s_128_tv_template)
4352  		}
4353  	}, {
4354  		.alg = "blake2s-160",
4355  		.test = alg_test_hash,
4356  		.suite = {
4357  			.hash = __VECS(blakes2s_160_tv_template)
4358  		}
4359  	}, {
4360  		.alg = "blake2s-224",
4361  		.test = alg_test_hash,
4362  		.suite = {
4363  			.hash = __VECS(blakes2s_224_tv_template)
4364  		}
4365  	}, {
4366  		.alg = "blake2s-256",
4367  		.test = alg_test_hash,
4368  		.suite = {
4369  			.hash = __VECS(blakes2s_256_tv_template)
4370  		}
4371  	}, {
4372  		.alg = "cbc(aes)",
4373  		.test = alg_test_skcipher,
4374  		.fips_allowed = 1,
4375  		.suite = {
4376  			.cipher = __VECS(aes_cbc_tv_template)
4377  		},
4378  	}, {
4379  		.alg = "cbc(anubis)",
4380  		.test = alg_test_skcipher,
4381  		.suite = {
4382  			.cipher = __VECS(anubis_cbc_tv_template)
4383  		},
4384  	}, {
4385  		.alg = "cbc(blowfish)",
4386  		.test = alg_test_skcipher,
4387  		.suite = {
4388  			.cipher = __VECS(bf_cbc_tv_template)
4389  		},
4390  	}, {
4391  		.alg = "cbc(camellia)",
4392  		.test = alg_test_skcipher,
4393  		.suite = {
4394  			.cipher = __VECS(camellia_cbc_tv_template)
4395  		},
4396  	}, {
4397  		.alg = "cbc(cast5)",
4398  		.test = alg_test_skcipher,
4399  		.suite = {
4400  			.cipher = __VECS(cast5_cbc_tv_template)
4401  		},
4402  	}, {
4403  		.alg = "cbc(cast6)",
4404  		.test = alg_test_skcipher,
4405  		.suite = {
4406  			.cipher = __VECS(cast6_cbc_tv_template)
4407  		},
4408  	}, {
4409  		.alg = "cbc(des)",
4410  		.test = alg_test_skcipher,
4411  		.suite = {
4412  			.cipher = __VECS(des_cbc_tv_template)
4413  		},
4414  	}, {
4415  		.alg = "cbc(des3_ede)",
4416  		.test = alg_test_skcipher,
4417  		.fips_allowed = 1,
4418  		.suite = {
4419  			.cipher = __VECS(des3_ede_cbc_tv_template)
4420  		},
4421  	}, {
4422  		/* Same as cbc(aes) except the key is stored in
4423  		 * hardware secure memory which we reference by index
4424  		 */
4425  		.alg = "cbc(paes)",
4426  		.test = alg_test_null,
4427  		.fips_allowed = 1,
4428  	}, {
4429  		/* Same as cbc(sm4) except the key is stored in
4430  		 * hardware secure memory which we reference by index
4431  		 */
4432  		.alg = "cbc(psm4)",
4433  		.test = alg_test_null,
4434  	}, {
4435  		.alg = "cbc(serpent)",
4436  		.test = alg_test_skcipher,
4437  		.suite = {
4438  			.cipher = __VECS(serpent_cbc_tv_template)
4439  		},
4440  	}, {
4441  		.alg = "cbc(sm4)",
4442  		.test = alg_test_skcipher,
4443  		.suite = {
4444  			.cipher = __VECS(sm4_cbc_tv_template)
4445  		}
4446  	}, {
4447  		.alg = "cbc(twofish)",
4448  		.test = alg_test_skcipher,
4449  		.suite = {
4450  			.cipher = __VECS(tf_cbc_tv_template)
4451  		},
4452  	}, {
4453  #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
4454  		.alg = "cbc-paes-s390",
4455  		.fips_allowed = 1,
4456  		.test = alg_test_skcipher,
4457  		.suite = {
4458  			.cipher = __VECS(aes_cbc_tv_template)
4459  		}
4460  	}, {
4461  #endif
4462  		.alg = "cbcmac(aes)",
4463  		.fips_allowed = 1,
4464  		.test = alg_test_hash,
4465  		.suite = {
4466  			.hash = __VECS(aes_cbcmac_tv_template)
4467  		}
4468  	}, {
4469  		.alg = "ccm(aes)",
4470  		.generic_driver = "ccm_base(ctr(aes-generic),cbcmac(aes-generic))",
4471  		.test = alg_test_aead,
4472  		.fips_allowed = 1,
4473  		.suite = {
4474  			.aead = {
4475  				____VECS(aes_ccm_tv_template),
4476  				.einval_allowed = 1,
4477  			}
4478  		}
4479  	}, {
4480  		.alg = "cfb(aes)",
4481  		.test = alg_test_skcipher,
4482  		.fips_allowed = 1,
4483  		.suite = {
4484  			.cipher = __VECS(aes_cfb_tv_template)
4485  		},
4486  	}, {
4487  		.alg = "cfb(sm4)",
4488  		.test = alg_test_skcipher,
4489  		.suite = {
4490  			.cipher = __VECS(sm4_cfb_tv_template)
4491  		}
4492  	}, {
4493  		.alg = "chacha20",
4494  		.test = alg_test_skcipher,
4495  		.suite = {
4496  			.cipher = __VECS(chacha20_tv_template)
4497  		},
4498  	}, {
4499  		.alg = "cmac(aes)",
4500  		.fips_allowed = 1,
4501  		.test = alg_test_hash,
4502  		.suite = {
4503  			.hash = __VECS(aes_cmac128_tv_template)
4504  		}
4505  	}, {
4506  		.alg = "cmac(des3_ede)",
4507  		.fips_allowed = 1,
4508  		.test = alg_test_hash,
4509  		.suite = {
4510  			.hash = __VECS(des3_ede_cmac64_tv_template)
4511  		}
4512  	}, {
4513  		.alg = "compress_null",
4514  		.test = alg_test_null,
4515  	}, {
4516  		.alg = "crc32",
4517  		.test = alg_test_hash,
4518  		.fips_allowed = 1,
4519  		.suite = {
4520  			.hash = __VECS(crc32_tv_template)
4521  		}
4522  	}, {
4523  		.alg = "crc32c",
4524  		.test = alg_test_crc32c,
4525  		.fips_allowed = 1,
4526  		.suite = {
4527  			.hash = __VECS(crc32c_tv_template)
4528  		}
4529  	}, {
4530  		.alg = "crct10dif",
4531  		.test = alg_test_hash,
4532  		.fips_allowed = 1,
4533  		.suite = {
4534  			.hash = __VECS(crct10dif_tv_template)
4535  		}
4536  	}, {
4537  		.alg = "ctr(aes)",
4538  		.test = alg_test_skcipher,
4539  		.fips_allowed = 1,
4540  		.suite = {
4541  			.cipher = __VECS(aes_ctr_tv_template)
4542  		}
4543  	}, {
4544  		.alg = "ctr(blowfish)",
4545  		.test = alg_test_skcipher,
4546  		.suite = {
4547  			.cipher = __VECS(bf_ctr_tv_template)
4548  		}
4549  	}, {
4550  		.alg = "ctr(camellia)",
4551  		.test = alg_test_skcipher,
4552  		.suite = {
4553  			.cipher = __VECS(camellia_ctr_tv_template)
4554  		}
4555  	}, {
4556  		.alg = "ctr(cast5)",
4557  		.test = alg_test_skcipher,
4558  		.suite = {
4559  			.cipher = __VECS(cast5_ctr_tv_template)
4560  		}
4561  	}, {
4562  		.alg = "ctr(cast6)",
4563  		.test = alg_test_skcipher,
4564  		.suite = {
4565  			.cipher = __VECS(cast6_ctr_tv_template)
4566  		}
4567  	}, {
4568  		.alg = "ctr(des)",
4569  		.test = alg_test_skcipher,
4570  		.suite = {
4571  			.cipher = __VECS(des_ctr_tv_template)
4572  		}
4573  	}, {
4574  		.alg = "ctr(des3_ede)",
4575  		.test = alg_test_skcipher,
4576  		.fips_allowed = 1,
4577  		.suite = {
4578  			.cipher = __VECS(des3_ede_ctr_tv_template)
4579  		}
4580  	}, {
4581  		/* Same as ctr(aes) except the key is stored in
4582  		 * hardware secure memory which we reference by index
4583  		 */
4584  		.alg = "ctr(paes)",
4585  		.test = alg_test_null,
4586  		.fips_allowed = 1,
4587  	}, {
4588  
4589  		/* Same as ctr(sm4) except the key is stored in
4590  		 * hardware secure memory which we reference by index
4591  		 */
4592  		.alg = "ctr(psm4)",
4593  		.test = alg_test_null,
4594  	}, {
4595  		.alg = "ctr(serpent)",
4596  		.test = alg_test_skcipher,
4597  		.suite = {
4598  			.cipher = __VECS(serpent_ctr_tv_template)
4599  		}
4600  	}, {
4601  		.alg = "ctr(sm4)",
4602  		.test = alg_test_skcipher,
4603  		.suite = {
4604  			.cipher = __VECS(sm4_ctr_tv_template)
4605  		}
4606  	}, {
4607  		.alg = "ctr(twofish)",
4608  		.test = alg_test_skcipher,
4609  		.suite = {
4610  			.cipher = __VECS(tf_ctr_tv_template)
4611  		}
4612  	}, {
4613  #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
4614  		.alg = "ctr-paes-s390",
4615  		.fips_allowed = 1,
4616  		.test = alg_test_skcipher,
4617  		.suite = {
4618  			.cipher = __VECS(aes_ctr_tv_template)
4619  		}
4620  	}, {
4621  #endif
4622  		.alg = "cts(cbc(aes))",
4623  		.test = alg_test_skcipher,
4624  		.fips_allowed = 1,
4625  		.suite = {
4626  			.cipher = __VECS(cts_mode_tv_template)
4627  		}
4628  	}, {
4629  		/* Same as cts(cbc(aes)) except the key is stored in
4630  		 * hardware secure memory which we reference by index
4631  		 */
4632  		.alg = "cts(cbc(paes))",
4633  		.test = alg_test_null,
4634  		.fips_allowed = 1,
4635  	}, {
4636  		.alg = "curve25519",
4637  		.test = alg_test_kpp,
4638  		.suite = {
4639  			.kpp = __VECS(curve25519_tv_template)
4640  		}
4641  	}, {
4642  		.alg = "deflate",
4643  		.test = alg_test_comp,
4644  		.fips_allowed = 1,
4645  		.suite = {
4646  			.comp = {
4647  				.comp = __VECS(deflate_comp_tv_template),
4648  				.decomp = __VECS(deflate_decomp_tv_template)
4649  			}
4650  		}
4651  	}, {
4652  		.alg = "dh",
4653  		.test = alg_test_kpp,
4654  		.fips_allowed = 1,
4655  		.suite = {
4656  			.kpp = __VECS(dh_tv_template)
4657  		}
4658  	}, {
4659  		.alg = "digest_null",
4660  		.test = alg_test_null,
4661  	}, {
4662  		.alg = "drbg_nopr_ctr_aes128",
4663  		.test = alg_test_drbg,
4664  		.fips_allowed = 1,
4665  		.suite = {
4666  			.drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
4667  		}
4668  	}, {
4669  		.alg = "drbg_nopr_ctr_aes192",
4670  		.test = alg_test_drbg,
4671  		.fips_allowed = 1,
4672  		.suite = {
4673  			.drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
4674  		}
4675  	}, {
4676  		.alg = "drbg_nopr_ctr_aes256",
4677  		.test = alg_test_drbg,
4678  		.fips_allowed = 1,
4679  		.suite = {
4680  			.drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
4681  		}
4682  	}, {
4683  		/*
4684  		 * There is no need to specifically test the DRBG with every
4685  		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
4686  		 */
4687  		.alg = "drbg_nopr_hmac_sha1",
4688  		.fips_allowed = 1,
4689  		.test = alg_test_null,
4690  	}, {
4691  		.alg = "drbg_nopr_hmac_sha256",
4692  		.test = alg_test_drbg,
4693  		.fips_allowed = 1,
4694  		.suite = {
4695  			.drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
4696  		}
4697  	}, {
4698  		/* covered by drbg_nopr_hmac_sha256 test */
4699  		.alg = "drbg_nopr_hmac_sha384",
4700  		.fips_allowed = 1,
4701  		.test = alg_test_null,
4702  	}, {
4703  		.alg = "drbg_nopr_hmac_sha512",
4704  		.test = alg_test_null,
4705  		.fips_allowed = 1,
4706  	}, {
4707  		.alg = "drbg_nopr_sha1",
4708  		.fips_allowed = 1,
4709  		.test = alg_test_null,
4710  	}, {
4711  		.alg = "drbg_nopr_sha256",
4712  		.test = alg_test_drbg,
4713  		.fips_allowed = 1,
4714  		.suite = {
4715  			.drbg = __VECS(drbg_nopr_sha256_tv_template)
4716  		}
4717  	}, {
4718  		/* covered by drbg_nopr_sha256 test */
4719  		.alg = "drbg_nopr_sha384",
4720  		.fips_allowed = 1,
4721  		.test = alg_test_null,
4722  	}, {
4723  		.alg = "drbg_nopr_sha512",
4724  		.fips_allowed = 1,
4725  		.test = alg_test_null,
4726  	}, {
4727  		.alg = "drbg_pr_ctr_aes128",
4728  		.test = alg_test_drbg,
4729  		.fips_allowed = 1,
4730  		.suite = {
4731  			.drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
4732  		}
4733  	}, {
4734  		/* covered by drbg_pr_ctr_aes128 test */
4735  		.alg = "drbg_pr_ctr_aes192",
4736  		.fips_allowed = 1,
4737  		.test = alg_test_null,
4738  	}, {
4739  		.alg = "drbg_pr_ctr_aes256",
4740  		.fips_allowed = 1,
4741  		.test = alg_test_null,
4742  	}, {
4743  		.alg = "drbg_pr_hmac_sha1",
4744  		.fips_allowed = 1,
4745  		.test = alg_test_null,
4746  	}, {
4747  		.alg = "drbg_pr_hmac_sha256",
4748  		.test = alg_test_drbg,
4749  		.fips_allowed = 1,
4750  		.suite = {
4751  			.drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
4752  		}
4753  	}, {
4754  		/* covered by drbg_pr_hmac_sha256 test */
4755  		.alg = "drbg_pr_hmac_sha384",
4756  		.fips_allowed = 1,
4757  		.test = alg_test_null,
4758  	}, {
4759  		.alg = "drbg_pr_hmac_sha512",
4760  		.test = alg_test_null,
4761  		.fips_allowed = 1,
4762  	}, {
4763  		.alg = "drbg_pr_sha1",
4764  		.fips_allowed = 1,
4765  		.test = alg_test_null,
4766  	}, {
4767  		.alg = "drbg_pr_sha256",
4768  		.test = alg_test_drbg,
4769  		.fips_allowed = 1,
4770  		.suite = {
4771  			.drbg = __VECS(drbg_pr_sha256_tv_template)
4772  		}
4773  	}, {
4774  		/* covered by drbg_pr_sha256 test */
4775  		.alg = "drbg_pr_sha384",
4776  		.fips_allowed = 1,
4777  		.test = alg_test_null,
4778  	}, {
4779  		.alg = "drbg_pr_sha512",
4780  		.fips_allowed = 1,
4781  		.test = alg_test_null,
4782  	}, {
4783  		.alg = "ecb(aes)",
4784  		.test = alg_test_skcipher,
4785  		.fips_allowed = 1,
4786  		.suite = {
4787  			.cipher = __VECS(aes_tv_template)
4788  		}
4789  	}, {
4790  		.alg = "ecb(anubis)",
4791  		.test = alg_test_skcipher,
4792  		.suite = {
4793  			.cipher = __VECS(anubis_tv_template)
4794  		}
4795  	}, {
4796  		.alg = "ecb(arc4)",
4797  		.generic_driver = "ecb(arc4)-generic",
4798  		.test = alg_test_skcipher,
4799  		.suite = {
4800  			.cipher = __VECS(arc4_tv_template)
4801  		}
4802  	}, {
4803  		.alg = "ecb(blowfish)",
4804  		.test = alg_test_skcipher,
4805  		.suite = {
4806  			.cipher = __VECS(bf_tv_template)
4807  		}
4808  	}, {
4809  		.alg = "ecb(camellia)",
4810  		.test = alg_test_skcipher,
4811  		.suite = {
4812  			.cipher = __VECS(camellia_tv_template)
4813  		}
4814  	}, {
4815  		.alg = "ecb(cast5)",
4816  		.test = alg_test_skcipher,
4817  		.suite = {
4818  			.cipher = __VECS(cast5_tv_template)
4819  		}
4820  	}, {
4821  		.alg = "ecb(cast6)",
4822  		.test = alg_test_skcipher,
4823  		.suite = {
4824  			.cipher = __VECS(cast6_tv_template)
4825  		}
4826  	}, {
4827  		.alg = "ecb(cipher_null)",
4828  		.test = alg_test_null,
4829  		.fips_allowed = 1,
4830  	}, {
4831  		.alg = "ecb(des)",
4832  		.test = alg_test_skcipher,
4833  		.suite = {
4834  			.cipher = __VECS(des_tv_template)
4835  		}
4836  	}, {
4837  		.alg = "ecb(des3_ede)",
4838  		.test = alg_test_skcipher,
4839  		.fips_allowed = 1,
4840  		.suite = {
4841  			.cipher = __VECS(des3_ede_tv_template)
4842  		}
4843  	}, {
4844  		.alg = "ecb(fcrypt)",
4845  		.test = alg_test_skcipher,
4846  		.suite = {
4847  			.cipher = {
4848  				.vecs = fcrypt_pcbc_tv_template,
4849  				.count = 1
4850  			}
4851  		}
4852  	}, {
4853  		.alg = "ecb(khazad)",
4854  		.test = alg_test_skcipher,
4855  		.suite = {
4856  			.cipher = __VECS(khazad_tv_template)
4857  		}
4858  	}, {
4859  		/* Same as ecb(aes) except the key is stored in
4860  		 * hardware secure memory which we reference by index
4861  		 */
4862  		.alg = "ecb(paes)",
4863  		.test = alg_test_null,
4864  		.fips_allowed = 1,
4865  	}, {
4866  		.alg = "ecb(seed)",
4867  		.test = alg_test_skcipher,
4868  		.suite = {
4869  			.cipher = __VECS(seed_tv_template)
4870  		}
4871  	}, {
4872  		.alg = "ecb(serpent)",
4873  		.test = alg_test_skcipher,
4874  		.suite = {
4875  			.cipher = __VECS(serpent_tv_template)
4876  		}
4877  	}, {
4878  		.alg = "ecb(sm4)",
4879  		.test = alg_test_skcipher,
4880  		.suite = {
4881  			.cipher = __VECS(sm4_tv_template)
4882  		}
4883  	}, {
4884  		.alg = "ecb(tea)",
4885  		.test = alg_test_skcipher,
4886  		.suite = {
4887  			.cipher = __VECS(tea_tv_template)
4888  		}
4889  	}, {
4890  		.alg = "ecb(tnepres)",
4891  		.test = alg_test_skcipher,
4892  		.suite = {
4893  			.cipher = __VECS(tnepres_tv_template)
4894  		}
4895  	}, {
4896  		.alg = "ecb(twofish)",
4897  		.test = alg_test_skcipher,
4898  		.suite = {
4899  			.cipher = __VECS(tf_tv_template)
4900  		}
4901  	}, {
4902  		.alg = "ecb(xeta)",
4903  		.test = alg_test_skcipher,
4904  		.suite = {
4905  			.cipher = __VECS(xeta_tv_template)
4906  		}
4907  	}, {
4908  		.alg = "ecb(xtea)",
4909  		.test = alg_test_skcipher,
4910  		.suite = {
4911  			.cipher = __VECS(xtea_tv_template)
4912  		}
4913  	}, {
4914  #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
4915  		.alg = "ecb-paes-s390",
4916  		.fips_allowed = 1,
4917  		.test = alg_test_skcipher,
4918  		.suite = {
4919  			.cipher = __VECS(aes_tv_template)
4920  		}
4921  	}, {
4922  #endif
4923  		.alg = "ecdh",
4924  		.test = alg_test_kpp,
4925  		.fips_allowed = 1,
4926  		.suite = {
4927  			.kpp = __VECS(ecdh_tv_template)
4928  		}
4929  	}, {
4930  		.alg = "ecrdsa",
4931  		.test = alg_test_akcipher,
4932  		.suite = {
4933  			.akcipher = __VECS(ecrdsa_tv_template)
4934  		}
4935  	}, {
4936  		.alg = "essiv(authenc(hmac(sha256),cbc(aes)),sha256)",
4937  		.test = alg_test_aead,
4938  		.fips_allowed = 1,
4939  		.suite = {
4940  			.aead = __VECS(essiv_hmac_sha256_aes_cbc_tv_temp)
4941  		}
4942  	}, {
4943  		.alg = "essiv(cbc(aes),sha256)",
4944  		.test = alg_test_skcipher,
4945  		.fips_allowed = 1,
4946  		.suite = {
4947  			.cipher = __VECS(essiv_aes_cbc_tv_template)
4948  		}
4949  	}, {
4950  		.alg = "gcm(aes)",
4951  		.generic_driver = "gcm_base(ctr(aes-generic),ghash-generic)",
4952  		.test = alg_test_aead,
4953  		.fips_allowed = 1,
4954  		.suite = {
4955  			.aead = __VECS(aes_gcm_tv_template)
4956  		}
4957  	}, {
4958  		.alg = "ghash",
4959  		.test = alg_test_hash,
4960  		.fips_allowed = 1,
4961  		.suite = {
4962  			.hash = __VECS(ghash_tv_template)
4963  		}
4964  	}, {
4965  		.alg = "hmac(md5)",
4966  		.test = alg_test_hash,
4967  		.suite = {
4968  			.hash = __VECS(hmac_md5_tv_template)
4969  		}
4970  	}, {
4971  		.alg = "hmac(rmd128)",
4972  		.test = alg_test_hash,
4973  		.suite = {
4974  			.hash = __VECS(hmac_rmd128_tv_template)
4975  		}
4976  	}, {
4977  		.alg = "hmac(rmd160)",
4978  		.test = alg_test_hash,
4979  		.suite = {
4980  			.hash = __VECS(hmac_rmd160_tv_template)
4981  		}
4982  	}, {
4983  		.alg = "hmac(sha1)",
4984  		.test = alg_test_hash,
4985  		.fips_allowed = 1,
4986  		.suite = {
4987  			.hash = __VECS(hmac_sha1_tv_template)
4988  		}
4989  	}, {
4990  		.alg = "hmac(sha224)",
4991  		.test = alg_test_hash,
4992  		.fips_allowed = 1,
4993  		.suite = {
4994  			.hash = __VECS(hmac_sha224_tv_template)
4995  		}
4996  	}, {
4997  		.alg = "hmac(sha256)",
4998  		.test = alg_test_hash,
4999  		.fips_allowed = 1,
5000  		.suite = {
5001  			.hash = __VECS(hmac_sha256_tv_template)
5002  		}
5003  	}, {
5004  		.alg = "hmac(sha3-224)",
5005  		.test = alg_test_hash,
5006  		.fips_allowed = 1,
5007  		.suite = {
5008  			.hash = __VECS(hmac_sha3_224_tv_template)
5009  		}
5010  	}, {
5011  		.alg = "hmac(sha3-256)",
5012  		.test = alg_test_hash,
5013  		.fips_allowed = 1,
5014  		.suite = {
5015  			.hash = __VECS(hmac_sha3_256_tv_template)
5016  		}
5017  	}, {
5018  		.alg = "hmac(sha3-384)",
5019  		.test = alg_test_hash,
5020  		.fips_allowed = 1,
5021  		.suite = {
5022  			.hash = __VECS(hmac_sha3_384_tv_template)
5023  		}
5024  	}, {
5025  		.alg = "hmac(sha3-512)",
5026  		.test = alg_test_hash,
5027  		.fips_allowed = 1,
5028  		.suite = {
5029  			.hash = __VECS(hmac_sha3_512_tv_template)
5030  		}
5031  	}, {
5032  		.alg = "hmac(sha384)",
5033  		.test = alg_test_hash,
5034  		.fips_allowed = 1,
5035  		.suite = {
5036  			.hash = __VECS(hmac_sha384_tv_template)
5037  		}
5038  	}, {
5039  		.alg = "hmac(sha512)",
5040  		.test = alg_test_hash,
5041  		.fips_allowed = 1,
5042  		.suite = {
5043  			.hash = __VECS(hmac_sha512_tv_template)
5044  		}
5045  	}, {
5046  		.alg = "hmac(sm3)",
5047  		.test = alg_test_hash,
5048  		.suite = {
5049  			.hash = __VECS(hmac_sm3_tv_template)
5050  		}
5051  	}, {
5052  		.alg = "hmac(streebog256)",
5053  		.test = alg_test_hash,
5054  		.suite = {
5055  			.hash = __VECS(hmac_streebog256_tv_template)
5056  		}
5057  	}, {
5058  		.alg = "hmac(streebog512)",
5059  		.test = alg_test_hash,
5060  		.suite = {
5061  			.hash = __VECS(hmac_streebog512_tv_template)
5062  		}
5063  	}, {
5064  		.alg = "jitterentropy_rng",
5065  		.fips_allowed = 1,
5066  		.test = alg_test_null,
5067  	}, {
5068  		.alg = "kw(aes)",
5069  		.test = alg_test_skcipher,
5070  		.fips_allowed = 1,
5071  		.suite = {
5072  			.cipher = __VECS(aes_kw_tv_template)
5073  		}
5074  	}, {
5075  		.alg = "lrw(aes)",
5076  		.generic_driver = "lrw(ecb(aes-generic))",
5077  		.test = alg_test_skcipher,
5078  		.suite = {
5079  			.cipher = __VECS(aes_lrw_tv_template)
5080  		}
5081  	}, {
5082  		.alg = "lrw(camellia)",
5083  		.generic_driver = "lrw(ecb(camellia-generic))",
5084  		.test = alg_test_skcipher,
5085  		.suite = {
5086  			.cipher = __VECS(camellia_lrw_tv_template)
5087  		}
5088  	}, {
5089  		.alg = "lrw(cast6)",
5090  		.generic_driver = "lrw(ecb(cast6-generic))",
5091  		.test = alg_test_skcipher,
5092  		.suite = {
5093  			.cipher = __VECS(cast6_lrw_tv_template)
5094  		}
5095  	}, {
5096  		.alg = "lrw(serpent)",
5097  		.generic_driver = "lrw(ecb(serpent-generic))",
5098  		.test = alg_test_skcipher,
5099  		.suite = {
5100  			.cipher = __VECS(serpent_lrw_tv_template)
5101  		}
5102  	}, {
5103  		.alg = "lrw(twofish)",
5104  		.generic_driver = "lrw(ecb(twofish-generic))",
5105  		.test = alg_test_skcipher,
5106  		.suite = {
5107  			.cipher = __VECS(tf_lrw_tv_template)
5108  		}
5109  	}, {
5110  		.alg = "lz4",
5111  		.test = alg_test_comp,
5112  		.fips_allowed = 1,
5113  		.suite = {
5114  			.comp = {
5115  				.comp = __VECS(lz4_comp_tv_template),
5116  				.decomp = __VECS(lz4_decomp_tv_template)
5117  			}
5118  		}
5119  	}, {
5120  		.alg = "lz4hc",
5121  		.test = alg_test_comp,
5122  		.fips_allowed = 1,
5123  		.suite = {
5124  			.comp = {
5125  				.comp = __VECS(lz4hc_comp_tv_template),
5126  				.decomp = __VECS(lz4hc_decomp_tv_template)
5127  			}
5128  		}
5129  	}, {
5130  		.alg = "lzo",
5131  		.test = alg_test_comp,
5132  		.fips_allowed = 1,
5133  		.suite = {
5134  			.comp = {
5135  				.comp = __VECS(lzo_comp_tv_template),
5136  				.decomp = __VECS(lzo_decomp_tv_template)
5137  			}
5138  		}
5139  	}, {
5140  		.alg = "lzo-rle",
5141  		.test = alg_test_comp,
5142  		.fips_allowed = 1,
5143  		.suite = {
5144  			.comp = {
5145  				.comp = __VECS(lzorle_comp_tv_template),
5146  				.decomp = __VECS(lzorle_decomp_tv_template)
5147  			}
5148  		}
5149  	}, {
5150  		.alg = "md4",
5151  		.test = alg_test_hash,
5152  		.suite = {
5153  			.hash = __VECS(md4_tv_template)
5154  		}
5155  	}, {
5156  		.alg = "md5",
5157  		.test = alg_test_hash,
5158  		.suite = {
5159  			.hash = __VECS(md5_tv_template)
5160  		}
5161  	}, {
5162  		.alg = "michael_mic",
5163  		.test = alg_test_hash,
5164  		.suite = {
5165  			.hash = __VECS(michael_mic_tv_template)
5166  		}
5167  	}, {
5168  		.alg = "nhpoly1305",
5169  		.test = alg_test_hash,
5170  		.suite = {
5171  			.hash = __VECS(nhpoly1305_tv_template)
5172  		}
5173  	}, {
5174  		.alg = "ofb(aes)",
5175  		.test = alg_test_skcipher,
5176  		.fips_allowed = 1,
5177  		.suite = {
5178  			.cipher = __VECS(aes_ofb_tv_template)
5179  		}
5180  	}, {
5181  		/* Same as ofb(aes) except the key is stored in
5182  		 * hardware secure memory which we reference by index
5183  		 */
5184  		.alg = "ofb(paes)",
5185  		.test = alg_test_null,
5186  		.fips_allowed = 1,
5187  	}, {
5188  		.alg = "ofb(sm4)",
5189  		.test = alg_test_skcipher,
5190  		.suite = {
5191  			.cipher = __VECS(sm4_ofb_tv_template)
5192  		}
5193  	}, {
5194  		.alg = "pcbc(fcrypt)",
5195  		.test = alg_test_skcipher,
5196  		.suite = {
5197  			.cipher = __VECS(fcrypt_pcbc_tv_template)
5198  		}
5199  	}, {
5200  		.alg = "pkcs1pad(rsa,sha224)",
5201  		.test = alg_test_null,
5202  		.fips_allowed = 1,
5203  	}, {
5204  		.alg = "pkcs1pad(rsa,sha256)",
5205  		.test = alg_test_akcipher,
5206  		.fips_allowed = 1,
5207  		.suite = {
5208  			.akcipher = __VECS(pkcs1pad_rsa_tv_template)
5209  		}
5210  	}, {
5211  		.alg = "pkcs1pad(rsa,sha384)",
5212  		.test = alg_test_null,
5213  		.fips_allowed = 1,
5214  	}, {
5215  		.alg = "pkcs1pad(rsa,sha512)",
5216  		.test = alg_test_null,
5217  		.fips_allowed = 1,
5218  	}, {
5219  		.alg = "poly1305",
5220  		.test = alg_test_hash,
5221  		.suite = {
5222  			.hash = __VECS(poly1305_tv_template)
5223  		}
5224  	}, {
5225  		.alg = "rfc3686(ctr(aes))",
5226  		.test = alg_test_skcipher,
5227  		.fips_allowed = 1,
5228  		.suite = {
5229  			.cipher = __VECS(aes_ctr_rfc3686_tv_template)
5230  		}
5231  	}, {
5232  		.alg = "rfc3686(ctr(sm4))",
5233  		.test = alg_test_skcipher,
5234  		.suite = {
5235  			.cipher = __VECS(sm4_ctr_rfc3686_tv_template)
5236  		}
5237  	}, {
5238  		.alg = "rfc4106(gcm(aes))",
5239  		.generic_driver = "rfc4106(gcm_base(ctr(aes-generic),ghash-generic))",
5240  		.test = alg_test_aead,
5241  		.fips_allowed = 1,
5242  		.suite = {
5243  			.aead = {
5244  				____VECS(aes_gcm_rfc4106_tv_template),
5245  				.einval_allowed = 1,
5246  				.aad_iv = 1,
5247  			}
5248  		}
5249  	}, {
5250  		.alg = "rfc4309(ccm(aes))",
5251  		.generic_driver = "rfc4309(ccm_base(ctr(aes-generic),cbcmac(aes-generic)))",
5252  		.test = alg_test_aead,
5253  		.fips_allowed = 1,
5254  		.suite = {
5255  			.aead = {
5256  				____VECS(aes_ccm_rfc4309_tv_template),
5257  				.einval_allowed = 1,
5258  				.aad_iv = 1,
5259  			}
5260  		}
5261  	}, {
5262  		.alg = "rfc4543(gcm(aes))",
5263  		.generic_driver = "rfc4543(gcm_base(ctr(aes-generic),ghash-generic))",
5264  		.test = alg_test_aead,
5265  		.suite = {
5266  			.aead = {
5267  				____VECS(aes_gcm_rfc4543_tv_template),
5268  				.einval_allowed = 1,
5269  				.aad_iv = 1,
5270  			}
5271  		}
5272  	}, {
5273  		.alg = "rfc7539(chacha20,poly1305)",
5274  		.test = alg_test_aead,
5275  		.suite = {
5276  			.aead = __VECS(rfc7539_tv_template)
5277  		}
5278  	}, {
5279  		.alg = "rfc7539esp(chacha20,poly1305)",
5280  		.test = alg_test_aead,
5281  		.suite = {
5282  			.aead = {
5283  				____VECS(rfc7539esp_tv_template),
5284  				.einval_allowed = 1,
5285  				.aad_iv = 1,
5286  			}
5287  		}
5288  	}, {
5289  		.alg = "rmd128",
5290  		.test = alg_test_hash,
5291  		.suite = {
5292  			.hash = __VECS(rmd128_tv_template)
5293  		}
5294  	}, {
5295  		.alg = "rmd160",
5296  		.test = alg_test_hash,
5297  		.suite = {
5298  			.hash = __VECS(rmd160_tv_template)
5299  		}
5300  	}, {
5301  		.alg = "rmd256",
5302  		.test = alg_test_hash,
5303  		.suite = {
5304  			.hash = __VECS(rmd256_tv_template)
5305  		}
5306  	}, {
5307  		.alg = "rmd320",
5308  		.test = alg_test_hash,
5309  		.suite = {
5310  			.hash = __VECS(rmd320_tv_template)
5311  		}
5312  	}, {
5313  		.alg = "rsa",
5314  		.test = alg_test_akcipher,
5315  		.fips_allowed = 1,
5316  		.suite = {
5317  			.akcipher = __VECS(rsa_tv_template)
5318  		}
5319  	}, {
5320  		.alg = "salsa20",
5321  		.test = alg_test_skcipher,
5322  		.suite = {
5323  			.cipher = __VECS(salsa20_stream_tv_template)
5324  		}
5325  	}, {
5326  		.alg = "sha1",
5327  		.test = alg_test_hash,
5328  		.fips_allowed = 1,
5329  		.suite = {
5330  			.hash = __VECS(sha1_tv_template)
5331  		}
5332  	}, {
5333  		.alg = "sha224",
5334  		.test = alg_test_hash,
5335  		.fips_allowed = 1,
5336  		.suite = {
5337  			.hash = __VECS(sha224_tv_template)
5338  		}
5339  	}, {
5340  		.alg = "sha256",
5341  		.test = alg_test_hash,
5342  		.fips_allowed = 1,
5343  		.suite = {
5344  			.hash = __VECS(sha256_tv_template)
5345  		}
5346  	}, {
5347  		.alg = "sha3-224",
5348  		.test = alg_test_hash,
5349  		.fips_allowed = 1,
5350  		.suite = {
5351  			.hash = __VECS(sha3_224_tv_template)
5352  		}
5353  	}, {
5354  		.alg = "sha3-256",
5355  		.test = alg_test_hash,
5356  		.fips_allowed = 1,
5357  		.suite = {
5358  			.hash = __VECS(sha3_256_tv_template)
5359  		}
5360  	}, {
5361  		.alg = "sha3-384",
5362  		.test = alg_test_hash,
5363  		.fips_allowed = 1,
5364  		.suite = {
5365  			.hash = __VECS(sha3_384_tv_template)
5366  		}
5367  	}, {
5368  		.alg = "sha3-512",
5369  		.test = alg_test_hash,
5370  		.fips_allowed = 1,
5371  		.suite = {
5372  			.hash = __VECS(sha3_512_tv_template)
5373  		}
5374  	}, {
5375  		.alg = "sha384",
5376  		.test = alg_test_hash,
5377  		.fips_allowed = 1,
5378  		.suite = {
5379  			.hash = __VECS(sha384_tv_template)
5380  		}
5381  	}, {
5382  		.alg = "sha512",
5383  		.test = alg_test_hash,
5384  		.fips_allowed = 1,
5385  		.suite = {
5386  			.hash = __VECS(sha512_tv_template)
5387  		}
5388  	}, {
5389  		.alg = "sm2",
5390  		.test = alg_test_akcipher,
5391  		.suite = {
5392  			.akcipher = __VECS(sm2_tv_template)
5393  		}
5394  	}, {
5395  		.alg = "sm3",
5396  		.test = alg_test_hash,
5397  		.suite = {
5398  			.hash = __VECS(sm3_tv_template)
5399  		}
5400  	}, {
5401  		.alg = "streebog256",
5402  		.test = alg_test_hash,
5403  		.suite = {
5404  			.hash = __VECS(streebog256_tv_template)
5405  		}
5406  	}, {
5407  		.alg = "streebog512",
5408  		.test = alg_test_hash,
5409  		.suite = {
5410  			.hash = __VECS(streebog512_tv_template)
5411  		}
5412  	}, {
5413  		.alg = "tgr128",
5414  		.test = alg_test_hash,
5415  		.suite = {
5416  			.hash = __VECS(tgr128_tv_template)
5417  		}
5418  	}, {
5419  		.alg = "tgr160",
5420  		.test = alg_test_hash,
5421  		.suite = {
5422  			.hash = __VECS(tgr160_tv_template)
5423  		}
5424  	}, {
5425  		.alg = "tgr192",
5426  		.test = alg_test_hash,
5427  		.suite = {
5428  			.hash = __VECS(tgr192_tv_template)
5429  		}
5430  	}, {
5431  		.alg = "vmac64(aes)",
5432  		.test = alg_test_hash,
5433  		.suite = {
5434  			.hash = __VECS(vmac64_aes_tv_template)
5435  		}
5436  	}, {
5437  		.alg = "wp256",
5438  		.test = alg_test_hash,
5439  		.suite = {
5440  			.hash = __VECS(wp256_tv_template)
5441  		}
5442  	}, {
5443  		.alg = "wp384",
5444  		.test = alg_test_hash,
5445  		.suite = {
5446  			.hash = __VECS(wp384_tv_template)
5447  		}
5448  	}, {
5449  		.alg = "wp512",
5450  		.test = alg_test_hash,
5451  		.suite = {
5452  			.hash = __VECS(wp512_tv_template)
5453  		}
5454  	}, {
5455  		.alg = "xcbc(aes)",
5456  		.test = alg_test_hash,
5457  		.suite = {
5458  			.hash = __VECS(aes_xcbc128_tv_template)
5459  		}
5460  	}, {
5461  		.alg = "xchacha12",
5462  		.test = alg_test_skcipher,
5463  		.suite = {
5464  			.cipher = __VECS(xchacha12_tv_template)
5465  		},
5466  	}, {
5467  		.alg = "xchacha20",
5468  		.test = alg_test_skcipher,
5469  		.suite = {
5470  			.cipher = __VECS(xchacha20_tv_template)
5471  		},
5472  	}, {
5473  		.alg = "xts(aes)",
5474  		.generic_driver = "xts(ecb(aes-generic))",
5475  		.test = alg_test_skcipher,
5476  		.fips_allowed = 1,
5477  		.suite = {
5478  			.cipher = __VECS(aes_xts_tv_template)
5479  		}
5480  	}, {
5481  		.alg = "xts(camellia)",
5482  		.generic_driver = "xts(ecb(camellia-generic))",
5483  		.test = alg_test_skcipher,
5484  		.suite = {
5485  			.cipher = __VECS(camellia_xts_tv_template)
5486  		}
5487  	}, {
5488  		.alg = "xts(cast6)",
5489  		.generic_driver = "xts(ecb(cast6-generic))",
5490  		.test = alg_test_skcipher,
5491  		.suite = {
5492  			.cipher = __VECS(cast6_xts_tv_template)
5493  		}
5494  	}, {
5495  		/* Same as xts(aes) except the key is stored in
5496  		 * hardware secure memory, which we reference by index
5497  		 */
5498  		.alg = "xts(paes)",
5499  		.test = alg_test_null,
5500  		.fips_allowed = 1,
5501  	}, {
5502  		.alg = "xts(serpent)",
5503  		.generic_driver = "xts(ecb(serpent-generic))",
5504  		.test = alg_test_skcipher,
5505  		.suite = {
5506  			.cipher = __VECS(serpent_xts_tv_template)
5507  		}
5508  	}, {
5509  		.alg = "xts(twofish)",
5510  		.generic_driver = "xts(ecb(twofish-generic))",
5511  		.test = alg_test_skcipher,
5512  		.suite = {
5513  			.cipher = __VECS(tf_xts_tv_template)
5514  		}
5515  	}, {
5516  #if IS_ENABLED(CONFIG_CRYPTO_PAES_S390)
5517  		.alg = "xts-paes-s390",
5518  		.fips_allowed = 1,
5519  		.test = alg_test_skcipher,
5520  		.suite = {
5521  			.cipher = __VECS(aes_xts_tv_template)
5522  		}
5523  	}, {
5524  #endif
5525  		.alg = "xts4096(paes)",
5526  		.test = alg_test_null,
5527  		.fips_allowed = 1,
5528  	}, {
5529  		.alg = "xts512(paes)",
5530  		.test = alg_test_null,
5531  		.fips_allowed = 1,
5532  	}, {
5533  		.alg = "xxhash64",
5534  		.test = alg_test_hash,
5535  		.fips_allowed = 1,
5536  		.suite = {
5537  			.hash = __VECS(xxhash64_tv_template)
5538  		}
5539  	}, {
5540  		.alg = "zlib-deflate",
5541  		.test = alg_test_comp,
5542  		.fips_allowed = 1,
5543  		.suite = {
5544  			.comp = {
5545  				.comp = __VECS(zlib_deflate_comp_tv_template),
5546  				.decomp = __VECS(zlib_deflate_decomp_tv_template)
5547  			}
5548  		}
5549  	}, {
5550  		.alg = "zstd",
5551  		.test = alg_test_comp,
5552  		.fips_allowed = 1,
5553  		.suite = {
5554  			.comp = {
5555  				.comp = __VECS(zstd_comp_tv_template),
5556  				.decomp = __VECS(zstd_decomp_tv_template)
5557  			}
5558  		}
5559  	}
5560  };
5561  
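/*
 * The alg_test_descs[] table above must stay sorted by .alg and must not
 * contain duplicates, since alg_find_test() binary-searches it.  Warn once
 * if either invariant is violated.
 */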
5562  static void alg_check_test_descs_order(void)
5563  {
5564  	int i;
5565  
5566  	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
5567  		int diff = strcmp(alg_test_descs[i - 1].alg,
5568  				  alg_test_descs[i].alg);
5569  
5570  		if (WARN_ON(diff > 0)) {
5571  			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
5572  				alg_test_descs[i - 1].alg,
5573  				alg_test_descs[i].alg);
5574  		}
5575  
5576  		if (WARN_ON(diff == 0)) {
5577  			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
5578  				alg_test_descs[i].alg);
5579  		}
5580  	}
5581  }
5582  
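/*
 * Warn once if any of the built-in default cipher or hash testvec
 * configurations is itself invalid.
 */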
5583  static void alg_check_testvec_configs(void)
5584  {
5585  	int i;
5586  
5587  	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
5588  		WARN_ON(!valid_testvec_config(
5589  				&default_cipher_testvec_configs[i]));
5590  
5591  	for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++)
5592  		WARN_ON(!valid_testvec_config(
5593  				&default_hash_testvec_configs[i]));
5594  }
5595  
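/* One-time setup, run via DO_ONCE() on the first call to alg_test(). */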
5596  static void testmgr_onetime_init(void)
5597  {
5598  	alg_check_test_descs_order();
5599  	alg_check_testvec_configs();
5600  
5601  #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
5602  	pr_warn("alg: extra crypto tests enabled.  This is intended for developer use only.\n");
5603  #endif
5604  }
5605  
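/*
 * Binary-search the (alphabetically sorted) alg_test_descs[] table for the
 * entry whose .alg matches @alg.  Returns its index, or -1 if none exists.
 */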
5606  static int alg_find_test(const char *alg)
5607  {
5608  	int start = 0;
5609  	int end = ARRAY_SIZE(alg_test_descs);
5610  
5611  	while (start < end) {
5612  		int i = (start + end) / 2;
5613  		int diff = strcmp(alg_test_descs[i].alg, alg);
5614  
5615  		if (diff > 0) {
5616  			end = i;
5617  			continue;
5618  		}
5619  
5620  		if (diff < 0) {
5621  			start = i + 1;
5622  			continue;
5623  		}
5624  
5625  		return i;
5626  	}
5627  
5628  	return -1;
5629  }
5630  
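/*
 * alg_test() - run the self-tests, if any, for one algorithm implementation.
 * @driver: driver (implementation) name, e.g. "sha256-generic"
 * @alg: algorithm name, e.g. "sha256"
 *
 * Test entries are looked up by both @alg and @driver, and all matching tests
 * are run.  Returns 0 if the tests passed or no tests exist, and a nonzero
 * error code otherwise.  In FIPS mode, or when panic_on_fail is set, a test
 * failure panics instead of returning.
 */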
5631  int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
5632  {
5633  	int i;
5634  	int j;
5635  	int rc;
5636  
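	/*
	 * The "notests" module parameter skips all self-tests, but it is
	 * ignored in FIPS mode.
	 */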
5637  	if (!fips_enabled && notests) {
5638  		printk_once(KERN_INFO "alg: self-tests disabled\n");
5639  		return 0;
5640  	}
5641  
5642  	DO_ONCE(testmgr_onetime_init);
5643  
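	/*
	 * For a bare single-block cipher, use the test entry registered for
	 * the corresponding "ecb(%s)" template.
	 */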
5644  	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
5645  		char nalg[CRYPTO_MAX_ALG_NAME];
5646  
5647  		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
5648  		    sizeof(nalg))
5649  			return -ENAMETOOLONG;
5650  
5651  		i = alg_find_test(nalg);
5652  		if (i < 0)
5653  			goto notest;
5654  
5655  		if (fips_enabled && !alg_test_descs[i].fips_allowed)
5656  			goto non_fips_alg;
5657  
5658  		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
5659  		goto test_done;
5660  	}
5661  
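	/*
	 * Look up test entries by both the algorithm name and the driver
	 * name; if both exist and differ, run both sets of tests.
	 */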
5662  	i = alg_find_test(alg);
5663  	j = alg_find_test(driver);
5664  	if (i < 0 && j < 0)
5665  		goto notest;
5666  
5667  	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
5668  			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
5669  		goto non_fips_alg;
5670  
5671  	rc = 0;
5672  	if (i >= 0)
5673  		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
5674  					     type, mask);
5675  	if (j >= 0 && j != i)
5676  		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
5677  					     type, mask);
5678  
5679  test_done:
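	/*
	 * In FIPS mode, or with the panic_on_fail module parameter set, any
	 * self-test failure is fatal.
	 */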
5680  	if (rc && (fips_enabled || panic_on_fail)) {
5681  		fips_fail_notify();
5682  		panic("alg: self-tests for %s (%s) failed in %s mode!\n",
5683  		      driver, alg, fips_enabled ? "fips" : "panic_on_fail");
5684  	}
5685  
5686  	if (fips_enabled && !rc)
5687  		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
5688  
5689  	return rc;
5690  
5691  notest:
5692  	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
5693  	return 0;
5694  non_fips_alg:
5695  	return -EINVAL;
5696  }
5697  
5698  #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
5699  
5700  EXPORT_SYMBOL_GPL(alg_test);
5701