xref: /openbmc/linux/crypto/testmgr.c (revision 4cff79e9)
1 /*
2  * Algorithm testing framework and tests.
3  *
4  * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5  * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6  * Copyright (c) 2007 Nokia Siemens Networks
7  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8  *
9  * Updated RFC4106 AES-GCM testing.
10  *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11  *             Adrian Hoban <adrian.hoban@intel.com>
12  *             Gabriele Paoloni <gabriele.paoloni@intel.com>
13  *             Tadeusz Struk (tadeusz.struk@intel.com)
14  *    Copyright (c) 2010, Intel Corporation.
15  *
16  * This program is free software; you can redistribute it and/or modify it
17  * under the terms of the GNU General Public License as published by the Free
18  * Software Foundation; either version 2 of the License, or (at your option)
19  * any later version.
20  *
21  */
22 
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35 #include <crypto/kpp.h>
36 #include <crypto/acompress.h>
37 
38 #include "internal.h"
39 
40 static bool notests;
41 module_param(notests, bool, 0644);
42 MODULE_PARM_DESC(notests, "disable crypto self-tests");
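/*
 * Usage note: testmgr.c is normally linked into the cryptomgr module (an
 * assumption based on the usual crypto/Makefile layout), so the parameter
 * above is typically set on the kernel command line as
 *
 *	cryptomgr.notests=1
 *
 * in which case alg_test() reports success for every algorithm without
 * running any of the tests below.
 */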
43 
44 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
45 
46 /* a perfect nop */
47 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
48 {
49 	return 0;
50 }
51 
52 #else
53 
54 #include "testmgr.h"
55 
56 /*
57  * Need slab memory for testing (size in number of pages).
58  */
59 #define XBUFSIZE	8
60 
61 /*
62  * Indexes into the xbuf to simulate cross-page access.
63  */
64 #define IDX1		32
65 #define IDX2		32400
66 #define IDX3		1511
67 #define IDX4		8193
68 #define IDX5		22222
69 #define IDX6		17101
70 #define IDX7		27333
71 #define IDX8		3000
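/*
 * A note on how these indexes are consumed: each IDX value encodes a page
 * number and an offset into the xbuf[] page array at once, and the test
 * code below recovers the two halves with
 *
 *	xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k])
 *
 * so that scatterlist chunks land at odd offsets in different pages.  For
 * example (assuming 4 KiB pages), IDX2 == 32400 maps to xbuf[7] at offset
 * 3728.
 */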
72 
73 /*
74  * Used by test_cipher()
75  */
76 #define ENCRYPT 1
77 #define DECRYPT 0
78 
79 struct aead_test_suite {
80 	struct {
81 		const struct aead_testvec *vecs;
82 		unsigned int count;
83 	} enc, dec;
84 };
85 
86 struct cipher_test_suite {
87 	struct {
88 		const struct cipher_testvec *vecs;
89 		unsigned int count;
90 	} enc, dec;
91 };
92 
93 struct comp_test_suite {
94 	struct {
95 		const struct comp_testvec *vecs;
96 		unsigned int count;
97 	} comp, decomp;
98 };
99 
100 struct hash_test_suite {
101 	const struct hash_testvec *vecs;
102 	unsigned int count;
103 };
104 
105 struct cprng_test_suite {
106 	const struct cprng_testvec *vecs;
107 	unsigned int count;
108 };
109 
110 struct drbg_test_suite {
111 	const struct drbg_testvec *vecs;
112 	unsigned int count;
113 };
114 
115 struct akcipher_test_suite {
116 	const struct akcipher_testvec *vecs;
117 	unsigned int count;
118 };
119 
120 struct kpp_test_suite {
121 	const struct kpp_testvec *vecs;
122 	unsigned int count;
123 };
124 
125 struct alg_test_desc {
126 	const char *alg;
127 	int (*test)(const struct alg_test_desc *desc, const char *driver,
128 		    u32 type, u32 mask);
129 	int fips_allowed;	/* set if alg is allowed in fips mode */
130 
131 	union {
132 		struct aead_test_suite aead;
133 		struct cipher_test_suite cipher;
134 		struct comp_test_suite comp;
135 		struct hash_test_suite hash;
136 		struct cprng_test_suite cprng;
137 		struct drbg_test_suite drbg;
138 		struct akcipher_test_suite akcipher;
139 		struct kpp_test_suite kpp;
140 	} suite;
141 };
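/*
 * A minimal sketch of how an entry in the alg_test_descs[] table (defined
 * further down in this file) fills in the union above, using "md5" and the
 * md5_tv_template vectors from testmgr.h purely as an illustration:
 *
 *	{
 *		.alg	= "md5",
 *		.test	= alg_test_hash,
 *		.suite	= {
 *			.hash = {
 *				.vecs	= md5_tv_template,
 *				.count	= ARRAY_SIZE(md5_tv_template),
 *			}
 *		}
 *	},
 */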
142 
143 static const unsigned int IDX[8] = {
144 	IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
145 
146 static void hexdump(unsigned char *buf, unsigned int len)
147 {
148 	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
149 			16, 1,
150 			buf, len, false);
151 }
152 
153 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
154 {
155 	int i;
156 
157 	for (i = 0; i < XBUFSIZE; i++) {
158 		buf[i] = (void *)__get_free_page(GFP_KERNEL);
159 		if (!buf[i])
160 			goto err_free_buf;
161 	}
162 
163 	return 0;
164 
165 err_free_buf:
166 	while (i-- > 0)
167 		free_page((unsigned long)buf[i]);
168 
169 	return -ENOMEM;
170 }
171 
172 static void testmgr_free_buf(char *buf[XBUFSIZE])
173 {
174 	int i;
175 
176 	for (i = 0; i < XBUFSIZE; i++)
177 		free_page((unsigned long)buf[i]);
178 }
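/*
 * A minimal sketch of the buffer-handling pattern the tests below follow
 * ("vec" stands in for whichever test vector is being processed):
 *
 *	char *xbuf[XBUFSIZE];
 *
 *	if (testmgr_alloc_buf(xbuf))
 *		return -ENOMEM;
 *	memcpy(xbuf[0], vec->input, vec->ilen);
 *	... build a scatterlist over xbuf[] and run the request ...
 *	testmgr_free_buf(xbuf);
 */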
179 
180 static int ahash_guard_result(char *result, char c, int size)
181 {
182 	int i;
183 
184 	for (i = 0; i < size; i++) {
185 		if (result[i] != c)
186 			return -EINVAL;
187 	}
188 
189 	return 0;
190 }
191 
192 static int ahash_partial_update(struct ahash_request **preq,
193 	struct crypto_ahash *tfm, const struct hash_testvec *template,
194 	void *hash_buff, int k, int temp, struct scatterlist *sg,
195 	const char *algo, char *result, struct crypto_wait *wait)
196 {
197 	char *state;
198 	struct ahash_request *req;
199 	int statesize, ret = -EINVAL;
200 	static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 };
201 	int digestsize = crypto_ahash_digestsize(tfm);
202 
203 	req = *preq;
204 	statesize = crypto_ahash_statesize(
205 			crypto_ahash_reqtfm(req));
206 	state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
207 	if (!state) {
208 		pr_err("alg: hash: Failed to alloc state for %s\n", algo);
209 		goto out_nostate;
210 	}
211 	memcpy(state + statesize, guard, sizeof(guard));
212 	memset(result, 1, digestsize);
213 	ret = crypto_ahash_export(req, state);
214 	WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
215 	if (ret) {
216 		pr_err("alg: hash: Failed to export() for %s\n", algo);
217 		goto out;
218 	}
219 	ret = ahash_guard_result(result, 1, digestsize);
220 	if (ret) {
221 		pr_err("alg: hash: Failed, export used req->result for %s\n",
222 		       algo);
223 		goto out;
224 	}
225 	ahash_request_free(req);
226 	req = ahash_request_alloc(tfm, GFP_KERNEL);
227 	if (!req) {
228 		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
229 		goto out_noreq;
230 	}
231 	ahash_request_set_callback(req,
232 		CRYPTO_TFM_REQ_MAY_BACKLOG,
233 		crypto_req_done, wait);
234 
235 	memcpy(hash_buff, template->plaintext + temp,
236 		template->tap[k]);
237 	sg_init_one(&sg[0], hash_buff, template->tap[k]);
238 	ahash_request_set_crypt(req, sg, result, template->tap[k]);
239 	ret = crypto_ahash_import(req, state);
240 	if (ret) {
241 		pr_err("alg: hash: Failed to import() for %s\n", algo);
242 		goto out;
243 	}
244 	ret = ahash_guard_result(result, 1, digestsize);
245 	if (ret) {
246 		pr_err("alg: hash: Failed, import used req->result for %s\n",
247 		       algo);
248 		goto out;
249 	}
250 	ret = crypto_wait_req(crypto_ahash_update(req), wait);
251 	if (ret)
252 		goto out;
253 	*preq = req;
254 	ret = 0;
255 	goto out_noreq;
256 out:
257 	ahash_request_free(req);
258 out_noreq:
259 	kfree(state);
260 out_nostate:
261 	return ret;
262 }
263 
264 static int __test_hash(struct crypto_ahash *tfm,
265 		       const struct hash_testvec *template, unsigned int tcount,
266 		       bool use_digest, const int align_offset)
267 {
268 	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
269 	size_t digest_size = crypto_ahash_digestsize(tfm);
270 	unsigned int i, j, k, temp;
271 	struct scatterlist sg[8];
272 	char *result;
273 	char *key;
274 	struct ahash_request *req;
275 	struct crypto_wait wait;
276 	void *hash_buff;
277 	char *xbuf[XBUFSIZE];
278 	int ret = -ENOMEM;
279 
280 	result = kmalloc(digest_size, GFP_KERNEL);
281 	if (!result)
282 		return ret;
283 	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
284 	if (!key)
285 		goto out_nobuf;
286 	if (testmgr_alloc_buf(xbuf))
287 		goto out_nobuf;
288 
289 	crypto_init_wait(&wait);
290 
291 	req = ahash_request_alloc(tfm, GFP_KERNEL);
292 	if (!req) {
293 		printk(KERN_ERR "alg: hash: Failed to allocate request for "
294 		       "%s\n", algo);
295 		goto out_noreq;
296 	}
297 	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
298 				   crypto_req_done, &wait);
299 
300 	j = 0;
301 	for (i = 0; i < tcount; i++) {
302 		if (template[i].np)
303 			continue;
304 
305 		ret = -EINVAL;
306 		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
307 			goto out;
308 
309 		j++;
310 		memset(result, 0, digest_size);
311 
312 		hash_buff = xbuf[0];
313 		hash_buff += align_offset;
314 
315 		memcpy(hash_buff, template[i].plaintext, template[i].psize);
316 		sg_init_one(&sg[0], hash_buff, template[i].psize);
317 
318 		if (template[i].ksize) {
319 			crypto_ahash_clear_flags(tfm, ~0);
320 			if (template[i].ksize > MAX_KEYLEN) {
321 				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
322 				       j, algo, template[i].ksize, MAX_KEYLEN);
323 				ret = -EINVAL;
324 				goto out;
325 			}
326 			memcpy(key, template[i].key, template[i].ksize);
327 			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
328 			if (ret) {
329 				printk(KERN_ERR "alg: hash: setkey failed on "
330 				       "test %d for %s: ret=%d\n", j, algo,
331 				       -ret);
332 				goto out;
333 			}
334 		}
335 
336 		ahash_request_set_crypt(req, sg, result, template[i].psize);
337 		if (use_digest) {
338 			ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
339 			if (ret) {
340 				pr_err("alg: hash: digest failed on test %d "
341 				       "for %s: ret=%d\n", j, algo, -ret);
342 				goto out;
343 			}
344 		} else {
345 			memset(result, 1, digest_size);
346 			ret = crypto_wait_req(crypto_ahash_init(req), &wait);
347 			if (ret) {
348 				pr_err("alg: hash: init failed on test %d "
349 				       "for %s: ret=%d\n", j, algo, -ret);
350 				goto out;
351 			}
352 			ret = ahash_guard_result(result, 1, digest_size);
353 			if (ret) {
354 				pr_err("alg: hash: init failed on test %d "
355 				       "for %s: used req->result\n", j, algo);
356 				goto out;
357 			}
358 			ret = crypto_wait_req(crypto_ahash_update(req), &wait);
359 			if (ret) {
360 				pr_err("alg: hash: update failed on test %d "
361 				       "for %s: ret=%d\n", j, algo, -ret);
362 				goto out;
363 			}
364 			ret = ahash_guard_result(result, 1, digest_size);
365 			if (ret) {
366 				pr_err("alg: hash: update failed on test %d "
367 				       "for %s: used req->result\n", j, algo);
368 				goto out;
369 			}
370 			ret = crypto_wait_req(crypto_ahash_final(req), &wait);
371 			if (ret) {
372 				pr_err("alg: hash: final failed on test %d "
373 				       "for %s: ret=%d\n", j, algo, -ret);
374 				goto out;
375 			}
376 		}
377 
378 		if (memcmp(result, template[i].digest,
379 			   crypto_ahash_digestsize(tfm))) {
380 			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
381 			       j, algo);
382 			hexdump(result, crypto_ahash_digestsize(tfm));
383 			ret = -EINVAL;
384 			goto out;
385 		}
386 	}
387 
388 	j = 0;
389 	for (i = 0; i < tcount; i++) {
390 		/* alignment tests are only done with contiguous buffers */
391 		if (align_offset != 0)
392 			break;
393 
394 		if (!template[i].np)
395 			continue;
396 
397 		j++;
398 		memset(result, 0, digest_size);
399 
400 		temp = 0;
401 		sg_init_table(sg, template[i].np);
402 		ret = -EINVAL;
403 		for (k = 0; k < template[i].np; k++) {
404 			if (WARN_ON(offset_in_page(IDX[k]) +
405 				    template[i].tap[k] > PAGE_SIZE))
406 				goto out;
407 			sg_set_buf(&sg[k],
408 				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
409 					  offset_in_page(IDX[k]),
410 					  template[i].plaintext + temp,
411 					  template[i].tap[k]),
412 				   template[i].tap[k]);
413 			temp += template[i].tap[k];
414 		}
415 
416 		if (template[i].ksize) {
417 			if (template[i].ksize > MAX_KEYLEN) {
418 				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
419 				       j, algo, template[i].ksize, MAX_KEYLEN);
420 				ret = -EINVAL;
421 				goto out;
422 			}
423 			crypto_ahash_clear_flags(tfm, ~0);
424 			memcpy(key, template[i].key, template[i].ksize);
425 			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
426 
427 			if (ret) {
428 				printk(KERN_ERR "alg: hash: setkey "
429 				       "failed on chunking test %d "
430 				       "for %s: ret=%d\n", j, algo, -ret);
431 				goto out;
432 			}
433 		}
434 
435 		ahash_request_set_crypt(req, sg, result, template[i].psize);
436 		ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
437 		if (ret) {
438 			pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
439 			       j, algo, -ret);
440 			goto out;
441 		}
442 
443 		if (memcmp(result, template[i].digest,
444 			   crypto_ahash_digestsize(tfm))) {
445 			printk(KERN_ERR "alg: hash: Chunking test %d "
446 			       "failed for %s\n", j, algo);
447 			hexdump(result, crypto_ahash_digestsize(tfm));
448 			ret = -EINVAL;
449 			goto out;
450 		}
451 	}
452 
453 	/* partial update exercise */
454 	j = 0;
455 	for (i = 0; i < tcount; i++) {
456 		/* alignment tests are only done with contiguous buffers */
457 		if (align_offset != 0)
458 			break;
459 
460 		if (template[i].np < 2)
461 			continue;
462 
463 		j++;
464 		memset(result, 0, digest_size);
465 
466 		ret = -EINVAL;
467 		hash_buff = xbuf[0];
468 		memcpy(hash_buff, template[i].plaintext,
469 			template[i].tap[0]);
470 		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
471 
472 		if (template[i].ksize) {
473 			crypto_ahash_clear_flags(tfm, ~0);
474 			if (template[i].ksize > MAX_KEYLEN) {
475 				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
476 					j, algo, template[i].ksize, MAX_KEYLEN);
477 				ret = -EINVAL;
478 				goto out;
479 			}
480 			memcpy(key, template[i].key, template[i].ksize);
481 			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
482 			if (ret) {
483 				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
484 					j, algo, -ret);
485 				goto out;
486 			}
487 		}
488 
489 		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
490 		ret = crypto_wait_req(crypto_ahash_init(req), &wait);
491 		if (ret) {
492 			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
493 				j, algo, -ret);
494 			goto out;
495 		}
496 		ret = crypto_wait_req(crypto_ahash_update(req), &wait);
497 		if (ret) {
498 			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
499 				j, algo, -ret);
500 			goto out;
501 		}
502 
503 		temp = template[i].tap[0];
504 		for (k = 1; k < template[i].np; k++) {
505 			ret = ahash_partial_update(&req, tfm, &template[i],
506 				hash_buff, k, temp, &sg[0], algo, result,
507 				&wait);
508 			if (ret) {
509 				pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
510 					j, algo, -ret);
511 				goto out_noreq;
512 			}
513 			temp += template[i].tap[k];
514 		}
515 		ret = crypto_wait_req(crypto_ahash_final(req), &wait);
516 		if (ret) {
517 			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
518 				j, algo, -ret);
519 			goto out;
520 		}
521 		if (memcmp(result, template[i].digest,
522 			   crypto_ahash_digestsize(tfm))) {
523 			pr_err("alg: hash: Partial Test %d failed for %s\n",
524 			       j, algo);
525 			hexdump(result, crypto_ahash_digestsize(tfm));
526 			ret = -EINVAL;
527 			goto out;
528 		}
529 	}
530 
531 	ret = 0;
532 
533 out:
534 	ahash_request_free(req);
535 out_noreq:
536 	testmgr_free_buf(xbuf);
537 out_nobuf:
538 	kfree(key);
539 	kfree(result);
540 	return ret;
541 }
542 
543 static int test_hash(struct crypto_ahash *tfm,
544 		     const struct hash_testvec *template,
545 		     unsigned int tcount, bool use_digest)
546 {
547 	unsigned int alignmask;
548 	int ret;
549 
550 	ret = __test_hash(tfm, template, tcount, use_digest, 0);
551 	if (ret)
552 		return ret;
553 
554 	/* test unaligned buffers, check with one byte offset */
555 	ret = __test_hash(tfm, template, tcount, use_digest, 1);
556 	if (ret)
557 		return ret;
558 
559 	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
560 	if (alignmask) {
561 		/* Check if alignment mask for tfm is correctly set. */
562 		ret = __test_hash(tfm, template, tcount, use_digest,
563 				  alignmask + 1);
564 		if (ret)
565 			return ret;
566 	}
567 
568 	return 0;
569 }
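/*
 * A minimal sketch of the one-shot hashing flow that __test_hash() drives
 * above; @tfm, @buf (linearly mapped, not on the stack), @len and @out
 * (crypto_ahash_digestsize(tfm) bytes) are assumed to be provided by the
 * caller:
 *
 *	struct ahash_request *req;
 *	struct crypto_wait wait;
 *	struct scatterlist sg;
 *	int ret;
 *
 *	crypto_init_wait(&wait);
 *	req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	if (!req)
 *		return -ENOMEM;
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	sg_init_one(&sg, buf, len);
 *	ahash_request_set_crypt(req, &sg, out, len);
 *	ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *	ahash_request_free(req);
 */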
570 
571 static int __test_aead(struct crypto_aead *tfm, int enc,
572 		       const struct aead_testvec *template, unsigned int tcount,
573 		       const bool diff_dst, const int align_offset)
574 {
575 	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
576 	unsigned int i, j, k, n, temp;
577 	int ret = -ENOMEM;
578 	char *q;
579 	char *key;
580 	struct aead_request *req;
581 	struct scatterlist *sg;
582 	struct scatterlist *sgout;
583 	const char *e, *d;
584 	struct crypto_wait wait;
585 	unsigned int authsize, iv_len;
586 	void *input;
587 	void *output;
588 	void *assoc;
589 	char *iv;
590 	char *xbuf[XBUFSIZE];
591 	char *xoutbuf[XBUFSIZE];
592 	char *axbuf[XBUFSIZE];
593 
594 	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
595 	if (!iv)
596 		return ret;
597 	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
598 	if (!key)
599 		goto out_noxbuf;
600 	if (testmgr_alloc_buf(xbuf))
601 		goto out_noxbuf;
602 	if (testmgr_alloc_buf(axbuf))
603 		goto out_noaxbuf;
604 	if (diff_dst && testmgr_alloc_buf(xoutbuf))
605 		goto out_nooutbuf;
606 
607 	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
608 	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
609 	if (!sg)
610 		goto out_nosg;
611 	sgout = &sg[16];
612 
613 	if (diff_dst)
614 		d = "-ddst";
615 	else
616 		d = "";
617 
618 	if (enc == ENCRYPT)
619 		e = "encryption";
620 	else
621 		e = "decryption";
622 
623 	crypto_init_wait(&wait);
624 
625 	req = aead_request_alloc(tfm, GFP_KERNEL);
626 	if (!req) {
627 		pr_err("alg: aead%s: Failed to allocate request for %s\n",
628 		       d, algo);
629 		goto out;
630 	}
631 
632 	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
633 				  crypto_req_done, &wait);
634 
635 	iv_len = crypto_aead_ivsize(tfm);
636 
637 	for (i = 0, j = 0; i < tcount; i++) {
638 		if (template[i].np)
639 			continue;
640 
641 		j++;
642 
643 		/* some templates have no input data but they still
644 		 * touch the input buffer
645 		 */
646 		input = xbuf[0];
647 		input += align_offset;
648 		assoc = axbuf[0];
649 
650 		ret = -EINVAL;
651 		if (WARN_ON(align_offset + template[i].ilen >
652 			    PAGE_SIZE || template[i].alen > PAGE_SIZE))
653 			goto out;
654 
655 		memcpy(input, template[i].input, template[i].ilen);
656 		memcpy(assoc, template[i].assoc, template[i].alen);
657 		if (template[i].iv)
658 			memcpy(iv, template[i].iv, iv_len);
659 		else
660 			memset(iv, 0, iv_len);
661 
662 		crypto_aead_clear_flags(tfm, ~0);
663 		if (template[i].wk)
664 			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
665 
666 		if (template[i].klen > MAX_KEYLEN) {
667 			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
668 			       d, j, algo, template[i].klen,
669 			       MAX_KEYLEN);
670 			ret = -EINVAL;
671 			goto out;
672 		}
673 		memcpy(key, template[i].key, template[i].klen);
674 
675 		ret = crypto_aead_setkey(tfm, key, template[i].klen);
676 		if (template[i].fail == !ret) {
677 			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
678 			       d, j, algo, crypto_aead_get_flags(tfm));
679 			goto out;
680 		} else if (ret)
681 			continue;
682 
683 		authsize = abs(template[i].rlen - template[i].ilen);
684 		ret = crypto_aead_setauthsize(tfm, authsize);
685 		if (ret) {
686 			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
687 			       d, authsize, j, algo);
688 			goto out;
689 		}
690 
691 		k = !!template[i].alen;
692 		sg_init_table(sg, k + 1);
693 		sg_set_buf(&sg[0], assoc, template[i].alen);
694 		sg_set_buf(&sg[k], input,
695 			   template[i].ilen + (enc ? authsize : 0));
696 		output = input;
697 
698 		if (diff_dst) {
699 			sg_init_table(sgout, k + 1);
700 			sg_set_buf(&sgout[0], assoc, template[i].alen);
701 
702 			output = xoutbuf[0];
703 			output += align_offset;
704 			sg_set_buf(&sgout[k], output,
705 				   template[i].rlen + (enc ? 0 : authsize));
706 		}
707 
708 		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
709 				       template[i].ilen, iv);
710 
711 		aead_request_set_ad(req, template[i].alen);
712 
713 		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
714 				      : crypto_aead_decrypt(req), &wait);
715 
716 		switch (ret) {
717 		case 0:
718 			if (template[i].novrfy) {
719 				/* verification was supposed to fail */
720 				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
721 				       d, e, j, algo);
722 				/* so really, we got a bad message */
723 				ret = -EBADMSG;
724 				goto out;
725 			}
726 			break;
727 		case -EBADMSG:
728 			if (template[i].novrfy)
729 				/* verification failure was expected */
730 				continue;
731 			/* fall through */
732 		default:
733 			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
734 			       d, e, j, algo, -ret);
735 			goto out;
736 		}
737 
738 		q = output;
739 		if (memcmp(q, template[i].result, template[i].rlen)) {
740 			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
741 			       d, j, e, algo);
742 			hexdump(q, template[i].rlen);
743 			ret = -EINVAL;
744 			goto out;
745 		}
746 	}
747 
748 	for (i = 0, j = 0; i < tcount; i++) {
749 		/* alignment tests are only done with contiguous buffers */
750 		if (align_offset != 0)
751 			break;
752 
753 		if (!template[i].np)
754 			continue;
755 
756 		j++;
757 
758 		if (template[i].iv)
759 			memcpy(iv, template[i].iv, iv_len);
760 		else
761 			memset(iv, 0, MAX_IVLEN);
762 
763 		crypto_aead_clear_flags(tfm, ~0);
764 		if (template[i].wk)
765 			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
766 		if (template[i].klen > MAX_KEYLEN) {
767 			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
768 			       d, j, algo, template[i].klen, MAX_KEYLEN);
769 			ret = -EINVAL;
770 			goto out;
771 		}
772 		memcpy(key, template[i].key, template[i].klen);
773 
774 		ret = crypto_aead_setkey(tfm, key, template[i].klen);
775 		if (template[i].fail == !ret) {
776 			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
777 			       d, j, algo, crypto_aead_get_flags(tfm));
778 			goto out;
779 		} else if (ret)
780 			continue;
781 
782 		authsize = abs(template[i].rlen - template[i].ilen);
783 
784 		ret = -EINVAL;
785 		sg_init_table(sg, template[i].anp + template[i].np);
786 		if (diff_dst)
787 			sg_init_table(sgout, template[i].anp + template[i].np);
788 
789 		ret = -EINVAL;
790 		for (k = 0, temp = 0; k < template[i].anp; k++) {
791 			if (WARN_ON(offset_in_page(IDX[k]) +
792 				    template[i].atap[k] > PAGE_SIZE))
793 				goto out;
794 			sg_set_buf(&sg[k],
795 				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
796 					  offset_in_page(IDX[k]),
797 					  template[i].assoc + temp,
798 					  template[i].atap[k]),
799 				   template[i].atap[k]);
800 			if (diff_dst)
801 				sg_set_buf(&sgout[k],
802 					   axbuf[IDX[k] >> PAGE_SHIFT] +
803 					   offset_in_page(IDX[k]),
804 					   template[i].atap[k]);
805 			temp += template[i].atap[k];
806 		}
807 
808 		for (k = 0, temp = 0; k < template[i].np; k++) {
809 			if (WARN_ON(offset_in_page(IDX[k]) +
810 				    template[i].tap[k] > PAGE_SIZE))
811 				goto out;
812 
813 			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
814 			memcpy(q, template[i].input + temp, template[i].tap[k]);
815 			sg_set_buf(&sg[template[i].anp + k],
816 				   q, template[i].tap[k]);
817 
818 			if (diff_dst) {
819 				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
820 				    offset_in_page(IDX[k]);
821 
822 				memset(q, 0, template[i].tap[k]);
823 
824 				sg_set_buf(&sgout[template[i].anp + k],
825 					   q, template[i].tap[k]);
826 			}
827 
828 			n = template[i].tap[k];
829 			if (k == template[i].np - 1 && enc)
830 				n += authsize;
831 			if (offset_in_page(q) + n < PAGE_SIZE)
832 				q[n] = 0;
833 
834 			temp += template[i].tap[k];
835 		}
836 
837 		ret = crypto_aead_setauthsize(tfm, authsize);
838 		if (ret) {
839 			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
840 			       d, authsize, j, algo);
841 			goto out;
842 		}
843 
844 		if (enc) {
845 			if (WARN_ON(sg[template[i].anp + k - 1].offset +
846 				    sg[template[i].anp + k - 1].length +
847 				    authsize > PAGE_SIZE)) {
848 				ret = -EINVAL;
849 				goto out;
850 			}
851 
852 			if (diff_dst)
853 				sgout[template[i].anp + k - 1].length +=
854 					authsize;
855 			sg[template[i].anp + k - 1].length += authsize;
856 		}
857 
858 		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
859 				       template[i].ilen,
860 				       iv);
861 
862 		aead_request_set_ad(req, template[i].alen);
863 
864 		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
865 				      : crypto_aead_decrypt(req), &wait);
866 
867 		switch (ret) {
868 		case 0:
869 			if (template[i].novrfy) {
870 				/* verification was supposed to fail */
871 				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
872 				       d, e, j, algo);
873 				/* so really, we got a bad message */
874 				ret = -EBADMSG;
875 				goto out;
876 			}
877 			break;
878 		case -EBADMSG:
879 			if (template[i].novrfy)
880 				/* verification failure was expected */
881 				continue;
882 			/* fall through */
883 		default:
884 			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
885 			       d, e, j, algo, -ret);
886 			goto out;
887 		}
888 
889 		ret = -EINVAL;
890 		for (k = 0, temp = 0; k < template[i].np; k++) {
891 			if (diff_dst)
892 				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
893 				    offset_in_page(IDX[k]);
894 			else
895 				q = xbuf[IDX[k] >> PAGE_SHIFT] +
896 				    offset_in_page(IDX[k]);
897 
898 			n = template[i].tap[k];
899 			if (k == template[i].np - 1)
900 				n += enc ? authsize : -authsize;
901 
902 			if (memcmp(q, template[i].result + temp, n)) {
903 				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
904 				       d, j, e, k, algo);
905 				hexdump(q, n);
906 				goto out;
907 			}
908 
909 			q += n;
910 			if (k == template[i].np - 1 && !enc) {
911 				if (!diff_dst &&
912 					memcmp(q, template[i].input +
913 					      temp + n, authsize))
914 					n = authsize;
915 				else
916 					n = 0;
917 			} else {
918 				for (n = 0; offset_in_page(q + n) && q[n]; n++)
919 					;
920 			}
921 			if (n) {
922 				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
923 				       d, j, e, k, algo, n);
924 				hexdump(q, n);
925 				goto out;
926 			}
927 
928 			temp += template[i].tap[k];
929 		}
930 	}
931 
932 	ret = 0;
933 
934 out:
935 	aead_request_free(req);
936 	kfree(sg);
937 out_nosg:
938 	if (diff_dst)
939 		testmgr_free_buf(xoutbuf);
940 out_nooutbuf:
941 	testmgr_free_buf(axbuf);
942 out_noaxbuf:
943 	testmgr_free_buf(xbuf);
944 out_noxbuf:
945 	kfree(key);
946 	kfree(iv);
947 	return ret;
948 }
949 
950 static int test_aead(struct crypto_aead *tfm, int enc,
951 		     const struct aead_testvec *template, unsigned int tcount)
952 {
953 	unsigned int alignmask;
954 	int ret;
955 
956 	/* test 'dst == src' case */
957 	ret = __test_aead(tfm, enc, template, tcount, false, 0);
958 	if (ret)
959 		return ret;
960 
961 	/* test 'dst != src' case */
962 	ret = __test_aead(tfm, enc, template, tcount, true, 0);
963 	if (ret)
964 		return ret;
965 
966 	/* test unaligned buffers, check with one byte offset */
967 	ret = __test_aead(tfm, enc, template, tcount, true, 1);
968 	if (ret)
969 		return ret;
970 
971 	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
972 	if (alignmask) {
973 		/* Check if alignment mask for tfm is correctly set. */
974 		ret = __test_aead(tfm, enc, template, tcount, true,
975 				  alignmask + 1);
976 		if (ret)
977 			return ret;
978 	}
979 
980 	return 0;
981 }
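/*
 * A minimal sketch of the in-place AEAD encryption that __test_aead()
 * drives above; @tfm, @req, @wait, @assoc/@assoclen, @ptext/@ptlen and @iv
 * are assumed to be set up as in the test.  Note that the data entry of
 * the scatterlist must leave room for the authentication tag:
 *
 *	struct scatterlist sg[2];
 *	unsigned int authsize = crypto_aead_authsize(tfm);
 *	int ret;
 *
 *	sg_init_table(sg, 2);
 *	sg_set_buf(&sg[0], assoc, assoclen);
 *	sg_set_buf(&sg[1], ptext, ptlen + authsize);
 *	aead_request_set_crypt(req, sg, sg, ptlen, iv);
 *	aead_request_set_ad(req, assoclen);
 *	ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);
 */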
982 
983 static int test_cipher(struct crypto_cipher *tfm, int enc,
984 		       const struct cipher_testvec *template,
985 		       unsigned int tcount)
986 {
987 	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
988 	unsigned int i, j, k;
989 	char *q;
990 	const char *e;
991 	void *data;
992 	char *xbuf[XBUFSIZE];
993 	int ret = -ENOMEM;
994 
995 	if (testmgr_alloc_buf(xbuf))
996 		goto out_nobuf;
997 
998 	if (enc == ENCRYPT)
999 		e = "encryption";
1000 	else
1001 		e = "decryption";
1002 
1003 	j = 0;
1004 	for (i = 0; i < tcount; i++) {
1005 		if (template[i].np)
1006 			continue;
1007 
1008 		if (fips_enabled && template[i].fips_skip)
1009 			continue;
1010 
1011 		j++;
1012 
1013 		ret = -EINVAL;
1014 		if (WARN_ON(template[i].ilen > PAGE_SIZE))
1015 			goto out;
1016 
1017 		data = xbuf[0];
1018 		memcpy(data, template[i].input, template[i].ilen);
1019 
1020 		crypto_cipher_clear_flags(tfm, ~0);
1021 		if (template[i].wk)
1022 			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1023 
1024 		ret = crypto_cipher_setkey(tfm, template[i].key,
1025 					   template[i].klen);
1026 		if (template[i].fail == !ret) {
1027 			printk(KERN_ERR "alg: cipher: setkey failed "
1028 			       "on test %d for %s: flags=%x\n", j,
1029 			       algo, crypto_cipher_get_flags(tfm));
1030 			goto out;
1031 		} else if (ret)
1032 			continue;
1033 
1034 		for (k = 0; k < template[i].ilen;
1035 		     k += crypto_cipher_blocksize(tfm)) {
1036 			if (enc)
1037 				crypto_cipher_encrypt_one(tfm, data + k,
1038 							  data + k);
1039 			else
1040 				crypto_cipher_decrypt_one(tfm, data + k,
1041 							  data + k);
1042 		}
1043 
1044 		q = data;
1045 		if (memcmp(q, template[i].result, template[i].rlen)) {
1046 			printk(KERN_ERR "alg: cipher: Test %d failed "
1047 			       "on %s for %s\n", j, e, algo);
1048 			hexdump(q, template[i].rlen);
1049 			ret = -EINVAL;
1050 			goto out;
1051 		}
1052 	}
1053 
1054 	ret = 0;
1055 
1056 out:
1057 	testmgr_free_buf(xbuf);
1058 out_nobuf:
1059 	return ret;
1060 }
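/*
 * A minimal sketch of the synchronous single-block interface that
 * test_cipher() loops over above: no IV, no scatterlists, one block at a
 * time.  @tfm, @key/@klen and @data/@len (a multiple of the block size)
 * are assumed to be provided by the caller:
 *
 *	unsigned int bs = crypto_cipher_blocksize(tfm);
 *	unsigned int k;
 *	int ret;
 *
 *	ret = crypto_cipher_setkey(tfm, key, klen);
 *	if (ret)
 *		return ret;
 *	for (k = 0; k < len; k += bs)
 *		crypto_cipher_encrypt_one(tfm, data + k, data + k);
 */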
1061 
1062 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1063 			   const struct cipher_testvec *template,
1064 			   unsigned int tcount,
1065 			   const bool diff_dst, const int align_offset)
1066 {
1067 	const char *algo =
1068 		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1069 	unsigned int i, j, k, n, temp;
1070 	char *q;
1071 	struct skcipher_request *req;
1072 	struct scatterlist sg[8];
1073 	struct scatterlist sgout[8];
1074 	const char *e, *d;
1075 	struct crypto_wait wait;
1076 	void *data;
1077 	char iv[MAX_IVLEN];
1078 	char *xbuf[XBUFSIZE];
1079 	char *xoutbuf[XBUFSIZE];
1080 	int ret = -ENOMEM;
1081 	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1082 
1083 	if (testmgr_alloc_buf(xbuf))
1084 		goto out_nobuf;
1085 
1086 	if (diff_dst && testmgr_alloc_buf(xoutbuf))
1087 		goto out_nooutbuf;
1088 
1089 	if (diff_dst)
1090 		d = "-ddst";
1091 	else
1092 		d = "";
1093 
1094 	if (enc == ENCRYPT)
1095 		e = "encryption";
1096 	else
1097 		e = "decryption";
1098 
1099 	crypto_init_wait(&wait);
1100 
1101 	req = skcipher_request_alloc(tfm, GFP_KERNEL);
1102 	if (!req) {
1103 		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1104 		       d, algo);
1105 		goto out;
1106 	}
1107 
1108 	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1109 				      crypto_req_done, &wait);
1110 
1111 	j = 0;
1112 	for (i = 0; i < tcount; i++) {
1113 		if (template[i].np && !template[i].also_non_np)
1114 			continue;
1115 
1116 		if (fips_enabled && template[i].fips_skip)
1117 			continue;
1118 
1119 		if (template[i].iv)
1120 			memcpy(iv, template[i].iv, ivsize);
1121 		else
1122 			memset(iv, 0, MAX_IVLEN);
1123 
1124 		j++;
1125 		ret = -EINVAL;
1126 		if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1127 			goto out;
1128 
1129 		data = xbuf[0];
1130 		data += align_offset;
1131 		memcpy(data, template[i].input, template[i].ilen);
1132 
1133 		crypto_skcipher_clear_flags(tfm, ~0);
1134 		if (template[i].wk)
1135 			crypto_skcipher_set_flags(tfm,
1136 						  CRYPTO_TFM_REQ_WEAK_KEY);
1137 
1138 		ret = crypto_skcipher_setkey(tfm, template[i].key,
1139 					     template[i].klen);
1140 		if (template[i].fail == !ret) {
1141 			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1142 			       d, j, algo, crypto_skcipher_get_flags(tfm));
1143 			goto out;
1144 		} else if (ret)
1145 			continue;
1146 
1147 		sg_init_one(&sg[0], data, template[i].ilen);
1148 		if (diff_dst) {
1149 			data = xoutbuf[0];
1150 			data += align_offset;
1151 			sg_init_one(&sgout[0], data, template[i].ilen);
1152 		}
1153 
1154 		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1155 					   template[i].ilen, iv);
1156 		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
1157 				      crypto_skcipher_decrypt(req), &wait);
1158 
1159 		if (ret) {
1160 			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1161 			       d, e, j, algo, -ret);
1162 			goto out;
1163 		}
1164 
1165 		q = data;
1166 		if (memcmp(q, template[i].result, template[i].rlen)) {
1167 			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1168 			       d, j, e, algo);
1169 			hexdump(q, template[i].rlen);
1170 			ret = -EINVAL;
1171 			goto out;
1172 		}
1173 
1174 		if (template[i].iv_out &&
1175 		    memcmp(iv, template[i].iv_out,
1176 			   crypto_skcipher_ivsize(tfm))) {
1177 			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1178 			       d, j, e, algo);
1179 			hexdump(iv, crypto_skcipher_ivsize(tfm));
1180 			ret = -EINVAL;
1181 			goto out;
1182 		}
1183 	}
1184 
1185 	j = 0;
1186 	for (i = 0; i < tcount; i++) {
1187 		/* alignment tests are only done with contiguous buffers */
1188 		if (align_offset != 0)
1189 			break;
1190 
1191 		if (!template[i].np)
1192 			continue;
1193 
1194 		if (fips_enabled && template[i].fips_skip)
1195 			continue;
1196 
1197 		if (template[i].iv)
1198 			memcpy(iv, template[i].iv, ivsize);
1199 		else
1200 			memset(iv, 0, MAX_IVLEN);
1201 
1202 		j++;
1203 		crypto_skcipher_clear_flags(tfm, ~0);
1204 		if (template[i].wk)
1205 			crypto_skcipher_set_flags(tfm,
1206 						  CRYPTO_TFM_REQ_WEAK_KEY);
1207 
1208 		ret = crypto_skcipher_setkey(tfm, template[i].key,
1209 					     template[i].klen);
1210 		if (template[i].fail == !ret) {
1211 			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1212 			       d, j, algo, crypto_skcipher_get_flags(tfm));
1213 			goto out;
1214 		} else if (ret)
1215 			continue;
1216 
1217 		temp = 0;
1218 		ret = -EINVAL;
1219 		sg_init_table(sg, template[i].np);
1220 		if (diff_dst)
1221 			sg_init_table(sgout, template[i].np);
1222 		for (k = 0; k < template[i].np; k++) {
1223 			if (WARN_ON(offset_in_page(IDX[k]) +
1224 				    template[i].tap[k] > PAGE_SIZE))
1225 				goto out;
1226 
1227 			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1228 
1229 			memcpy(q, template[i].input + temp, template[i].tap[k]);
1230 
1231 			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1232 				q[template[i].tap[k]] = 0;
1233 
1234 			sg_set_buf(&sg[k], q, template[i].tap[k]);
1235 			if (diff_dst) {
1236 				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1237 				    offset_in_page(IDX[k]);
1238 
1239 				sg_set_buf(&sgout[k], q, template[i].tap[k]);
1240 
1241 				memset(q, 0, template[i].tap[k]);
1242 				if (offset_in_page(q) +
1243 				    template[i].tap[k] < PAGE_SIZE)
1244 					q[template[i].tap[k]] = 0;
1245 			}
1246 
1247 			temp += template[i].tap[k];
1248 		}
1249 
1250 		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1251 					   template[i].ilen, iv);
1252 
1253 		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
1254 				      crypto_skcipher_decrypt(req), &wait);
1255 
1256 		if (ret) {
1257 			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1258 			       d, e, j, algo, -ret);
1259 			goto out;
1260 		}
1261 
1262 		temp = 0;
1263 		ret = -EINVAL;
1264 		for (k = 0; k < template[i].np; k++) {
1265 			if (diff_dst)
1266 				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1267 				    offset_in_page(IDX[k]);
1268 			else
1269 				q = xbuf[IDX[k] >> PAGE_SHIFT] +
1270 				    offset_in_page(IDX[k]);
1271 
1272 			if (memcmp(q, template[i].result + temp,
1273 				   template[i].tap[k])) {
1274 				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1275 				       d, j, e, k, algo);
1276 				hexdump(q, template[i].tap[k]);
1277 				goto out;
1278 			}
1279 
1280 			q += template[i].tap[k];
1281 			for (n = 0; offset_in_page(q + n) && q[n]; n++)
1282 				;
1283 			if (n) {
1284 				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1285 				       d, j, e, k, algo, n);
1286 				hexdump(q, n);
1287 				goto out;
1288 			}
1289 			temp += template[i].tap[k];
1290 		}
1291 	}
1292 
1293 	ret = 0;
1294 
1295 out:
1296 	skcipher_request_free(req);
1297 	if (diff_dst)
1298 		testmgr_free_buf(xoutbuf);
1299 out_nooutbuf:
1300 	testmgr_free_buf(xbuf);
1301 out_nobuf:
1302 	return ret;
1303 }
1304 
1305 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1306 			 const struct cipher_testvec *template,
1307 			 unsigned int tcount)
1308 {
1309 	unsigned int alignmask;
1310 	int ret;
1311 
1312 	/* test 'dst == src' case */
1313 	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1314 	if (ret)
1315 		return ret;
1316 
1317 	/* test 'dst != src' case */
1318 	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1319 	if (ret)
1320 		return ret;
1321 
1322 	/* test unaligned buffers, check with one byte offset */
1323 	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1324 	if (ret)
1325 		return ret;
1326 
1327 	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1328 	if (alignmask) {
1329 		/* Check if alignment mask for tfm is correctly set. */
1330 		ret = __test_skcipher(tfm, enc, template, tcount, true,
1331 				      alignmask + 1);
1332 		if (ret)
1333 			return ret;
1334 	}
1335 
1336 	return 0;
1337 }
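/*
 * A minimal sketch of the in-place skcipher request flow that
 * __test_skcipher() drives above; @tfm, @data (linearly mapped), @len,
 * @iv and @wait are assumed to be prepared as in the test.  For chaining
 * modes the IV buffer is updated in place, which is what the iv_out
 * comparison above relies on:
 *
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	int ret;
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	if (!req)
 *		return -ENOMEM;
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	sg_init_one(&sg, data, len);
 *	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
 *	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 */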
1338 
1339 static int test_comp(struct crypto_comp *tfm,
1340 		     const struct comp_testvec *ctemplate,
1341 		     const struct comp_testvec *dtemplate,
1342 		     int ctcount, int dtcount)
1343 {
1344 	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1345 	unsigned int i;
1346 	char result[COMP_BUF_SIZE];
1347 	int ret;
1348 
1349 	for (i = 0; i < ctcount; i++) {
1350 		int ilen;
1351 		unsigned int dlen = COMP_BUF_SIZE;
1352 
1353 		memset(result, 0, sizeof (result));
1354 
1355 		ilen = ctemplate[i].inlen;
1356 		ret = crypto_comp_compress(tfm, ctemplate[i].input,
1357 		                           ilen, result, &dlen);
1358 		if (ret) {
1359 			printk(KERN_ERR "alg: comp: compression failed "
1360 			       "on test %d for %s: ret=%d\n", i + 1, algo,
1361 			       -ret);
1362 			goto out;
1363 		}
1364 
1365 		if (dlen != ctemplate[i].outlen) {
1366 			printk(KERN_ERR "alg: comp: Compression test %d "
1367 			       "failed for %s: output len = %d\n", i + 1, algo,
1368 			       dlen);
1369 			ret = -EINVAL;
1370 			goto out;
1371 		}
1372 
1373 		if (memcmp(result, ctemplate[i].output, dlen)) {
1374 			printk(KERN_ERR "alg: comp: Compression test %d "
1375 			       "failed for %s\n", i + 1, algo);
1376 			hexdump(result, dlen);
1377 			ret = -EINVAL;
1378 			goto out;
1379 		}
1380 	}
1381 
1382 	for (i = 0; i < dtcount; i++) {
1383 		int ilen;
1384 		unsigned int dlen = COMP_BUF_SIZE;
1385 
1386 		memset(result, 0, sizeof (result));
1387 
1388 		ilen = dtemplate[i].inlen;
1389 		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1390 		                             ilen, result, &dlen);
1391 		if (ret) {
1392 			printk(KERN_ERR "alg: comp: decompression failed "
1393 			       "on test %d for %s: ret=%d\n", i + 1, algo,
1394 			       -ret);
1395 			goto out;
1396 		}
1397 
1398 		if (dlen != dtemplate[i].outlen) {
1399 			printk(KERN_ERR "alg: comp: Decompression test %d "
1400 			       "failed for %s: output len = %d\n", i + 1, algo,
1401 			       dlen);
1402 			ret = -EINVAL;
1403 			goto out;
1404 		}
1405 
1406 		if (memcmp(result, dtemplate[i].output, dlen)) {
1407 			printk(KERN_ERR "alg: comp: Decompression test %d "
1408 			       "failed for %s\n", i + 1, algo);
1409 			hexdump(result, dlen);
1410 			ret = -EINVAL;
1411 			goto out;
1412 		}
1413 	}
1414 
1415 	ret = 0;
1416 
1417 out:
1418 	return ret;
1419 }
1420 
1421 static int test_acomp(struct crypto_acomp *tfm,
1422 		      const struct comp_testvec *ctemplate,
1423 		      const struct comp_testvec *dtemplate,
1424 		      int ctcount, int dtcount)
1425 {
1426 	const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
1427 	unsigned int i;
1428 	char *output, *decomp_out;
1429 	int ret;
1430 	struct scatterlist src, dst;
1431 	struct acomp_req *req;
1432 	struct crypto_wait wait;
1433 
1434 	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1435 	if (!output)
1436 		return -ENOMEM;
1437 
1438 	decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1439 	if (!decomp_out) {
1440 		kfree(output);
1441 		return -ENOMEM;
1442 	}
1443 
1444 	for (i = 0; i < ctcount; i++) {
1445 		unsigned int dlen = COMP_BUF_SIZE;
1446 		int ilen = ctemplate[i].inlen;
1447 		void *input_vec;
1448 
1449 		input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
1450 		if (!input_vec) {
1451 			ret = -ENOMEM;
1452 			goto out;
1453 		}
1454 
1455 		memset(output, 0, dlen);
1456 		crypto_init_wait(&wait);
1457 		sg_init_one(&src, input_vec, ilen);
1458 		sg_init_one(&dst, output, dlen);
1459 
1460 		req = acomp_request_alloc(tfm);
1461 		if (!req) {
1462 			pr_err("alg: acomp: request alloc failed for %s\n",
1463 			       algo);
1464 			kfree(input_vec);
1465 			ret = -ENOMEM;
1466 			goto out;
1467 		}
1468 
1469 		acomp_request_set_params(req, &src, &dst, ilen, dlen);
1470 		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1471 					   crypto_req_done, &wait);
1472 
1473 		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
1474 		if (ret) {
1475 			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1476 			       i + 1, algo, -ret);
1477 			kfree(input_vec);
1478 			acomp_request_free(req);
1479 			goto out;
1480 		}
1481 
1482 		ilen = req->dlen;
1483 		dlen = COMP_BUF_SIZE;
1484 		sg_init_one(&src, output, ilen);
1485 		sg_init_one(&dst, decomp_out, dlen);
1486 		crypto_init_wait(&wait);
1487 		acomp_request_set_params(req, &src, &dst, ilen, dlen);
1488 
1489 		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
1490 		if (ret) {
1491 			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1492 			       i + 1, algo, -ret);
1493 			kfree(input_vec);
1494 			acomp_request_free(req);
1495 			goto out;
1496 		}
1497 
1498 		if (req->dlen != ctemplate[i].inlen) {
1499 			pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
1500 			       i + 1, algo, req->dlen);
1501 			ret = -EINVAL;
1502 			kfree(input_vec);
1503 			acomp_request_free(req);
1504 			goto out;
1505 		}
1506 
1507 		if (memcmp(input_vec, decomp_out, req->dlen)) {
1508 			pr_err("alg: acomp: Compression test %d failed for %s\n",
1509 			       i + 1, algo);
1510 			hexdump(output, req->dlen);
1511 			ret = -EINVAL;
1512 			kfree(input_vec);
1513 			acomp_request_free(req);
1514 			goto out;
1515 		}
1516 
1517 		kfree(input_vec);
1518 		acomp_request_free(req);
1519 	}
1520 
1521 	for (i = 0; i < dtcount; i++) {
1522 		unsigned int dlen = COMP_BUF_SIZE;
1523 		int ilen = dtemplate[i].inlen;
1524 		void *input_vec;
1525 
1526 		input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
1527 		if (!input_vec) {
1528 			ret = -ENOMEM;
1529 			goto out;
1530 		}
1531 
1532 		memset(output, 0, dlen);
1533 		crypto_init_wait(&wait);
1534 		sg_init_one(&src, input_vec, ilen);
1535 		sg_init_one(&dst, output, dlen);
1536 
1537 		req = acomp_request_alloc(tfm);
1538 		if (!req) {
1539 			pr_err("alg: acomp: request alloc failed for %s\n",
1540 			       algo);
1541 			kfree(input_vec);
1542 			ret = -ENOMEM;
1543 			goto out;
1544 		}
1545 
1546 		acomp_request_set_params(req, &src, &dst, ilen, dlen);
1547 		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1548 					   crypto_req_done, &wait);
1549 
1550 		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
1551 		if (ret) {
1552 			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1553 			       i + 1, algo, -ret);
1554 			kfree(input_vec);
1555 			acomp_request_free(req);
1556 			goto out;
1557 		}
1558 
1559 		if (req->dlen != dtemplate[i].outlen) {
1560 			pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
1561 			       i + 1, algo, req->dlen);
1562 			ret = -EINVAL;
1563 			kfree(input_vec);
1564 			acomp_request_free(req);
1565 			goto out;
1566 		}
1567 
1568 		if (memcmp(output, dtemplate[i].output, req->dlen)) {
1569 			pr_err("alg: acomp: Decompression test %d failed for %s\n",
1570 			       i + 1, algo);
1571 			hexdump(output, req->dlen);
1572 			ret = -EINVAL;
1573 			kfree(input_vec);
1574 			acomp_request_free(req);
1575 			goto out;
1576 		}
1577 
1578 		kfree(input_vec);
1579 		acomp_request_free(req);
1580 	}
1581 
1582 	ret = 0;
1583 
1584 out:
1585 	kfree(decomp_out);
1586 	kfree(output);
1587 	return ret;
1588 }
1589 
1590 static int test_cprng(struct crypto_rng *tfm,
1591 		      const struct cprng_testvec *template,
1592 		      unsigned int tcount)
1593 {
1594 	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1595 	int err = 0, i, j, seedsize;
1596 	u8 *seed;
1597 	char result[32];
1598 
1599 	seedsize = crypto_rng_seedsize(tfm);
1600 
1601 	seed = kmalloc(seedsize, GFP_KERNEL);
1602 	if (!seed) {
1603 		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1604 		       "for %s\n", algo);
1605 		return -ENOMEM;
1606 	}
1607 
1608 	for (i = 0; i < tcount; i++) {
1609 		memset(result, 0, 32);
1610 
1611 		memcpy(seed, template[i].v, template[i].vlen);
1612 		memcpy(seed + template[i].vlen, template[i].key,
1613 		       template[i].klen);
1614 		memcpy(seed + template[i].vlen + template[i].klen,
1615 		       template[i].dt, template[i].dtlen);
1616 
1617 		err = crypto_rng_reset(tfm, seed, seedsize);
1618 		if (err) {
1619 			printk(KERN_ERR "alg: cprng: Failed to reset rng "
1620 			       "for %s\n", algo);
1621 			goto out;
1622 		}
1623 
1624 		for (j = 0; j < template[i].loops; j++) {
1625 			err = crypto_rng_get_bytes(tfm, result,
1626 						   template[i].rlen);
1627 			if (err < 0) {
1628 				printk(KERN_ERR "alg: cprng: Failed to obtain "
1629 				       "the correct amount of random data for "
1630 				       "%s (requested %d)\n", algo,
1631 				       template[i].rlen);
1632 				goto out;
1633 			}
1634 		}
1635 
1636 		err = memcmp(result, template[i].result,
1637 			     template[i].rlen);
1638 		if (err) {
1639 			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1640 			       i, algo);
1641 			hexdump(result, template[i].rlen);
1642 			err = -EINVAL;
1643 			goto out;
1644 		}
1645 	}
1646 
1647 out:
1648 	kfree(seed);
1649 	return err;
1650 }
1651 
1652 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1653 			 u32 type, u32 mask)
1654 {
1655 	struct crypto_aead *tfm;
1656 	int err = 0;
1657 
1658 	tfm = crypto_alloc_aead(driver, type, mask);
1659 	if (IS_ERR(tfm)) {
1660 		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1661 		       "%ld\n", driver, PTR_ERR(tfm));
1662 		return PTR_ERR(tfm);
1663 	}
1664 
1665 	if (desc->suite.aead.enc.vecs) {
1666 		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1667 				desc->suite.aead.enc.count);
1668 		if (err)
1669 			goto out;
1670 	}
1671 
1672 	if (!err && desc->suite.aead.dec.vecs)
1673 		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1674 				desc->suite.aead.dec.count);
1675 
1676 out:
1677 	crypto_free_aead(tfm);
1678 	return err;
1679 }
1680 
1681 static int alg_test_cipher(const struct alg_test_desc *desc,
1682 			   const char *driver, u32 type, u32 mask)
1683 {
1684 	struct crypto_cipher *tfm;
1685 	int err = 0;
1686 
1687 	tfm = crypto_alloc_cipher(driver, type, mask);
1688 	if (IS_ERR(tfm)) {
1689 		printk(KERN_ERR "alg: cipher: Failed to load transform for "
1690 		       "%s: %ld\n", driver, PTR_ERR(tfm));
1691 		return PTR_ERR(tfm);
1692 	}
1693 
1694 	if (desc->suite.cipher.enc.vecs) {
1695 		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1696 				  desc->suite.cipher.enc.count);
1697 		if (err)
1698 			goto out;
1699 	}
1700 
1701 	if (desc->suite.cipher.dec.vecs)
1702 		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1703 				  desc->suite.cipher.dec.count);
1704 
1705 out:
1706 	crypto_free_cipher(tfm);
1707 	return err;
1708 }
1709 
1710 static int alg_test_skcipher(const struct alg_test_desc *desc,
1711 			     const char *driver, u32 type, u32 mask)
1712 {
1713 	struct crypto_skcipher *tfm;
1714 	int err = 0;
1715 
1716 	tfm = crypto_alloc_skcipher(driver, type, mask);
1717 	if (IS_ERR(tfm)) {
1718 		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1719 		       "%s: %ld\n", driver, PTR_ERR(tfm));
1720 		return PTR_ERR(tfm);
1721 	}
1722 
1723 	if (desc->suite.cipher.enc.vecs) {
1724 		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1725 				    desc->suite.cipher.enc.count);
1726 		if (err)
1727 			goto out;
1728 	}
1729 
1730 	if (desc->suite.cipher.dec.vecs)
1731 		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1732 				    desc->suite.cipher.dec.count);
1733 
1734 out:
1735 	crypto_free_skcipher(tfm);
1736 	return err;
1737 }
1738 
1739 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1740 			 u32 type, u32 mask)
1741 {
1742 	struct crypto_comp *comp;
1743 	struct crypto_acomp *acomp;
1744 	int err;
1745 	u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
1746 
1747 	if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
1748 		acomp = crypto_alloc_acomp(driver, type, mask);
1749 		if (IS_ERR(acomp)) {
1750 			pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1751 			       driver, PTR_ERR(acomp));
1752 			return PTR_ERR(acomp);
1753 		}
1754 		err = test_acomp(acomp, desc->suite.comp.comp.vecs,
1755 				 desc->suite.comp.decomp.vecs,
1756 				 desc->suite.comp.comp.count,
1757 				 desc->suite.comp.decomp.count);
1758 		crypto_free_acomp(acomp);
1759 	} else {
1760 		comp = crypto_alloc_comp(driver, type, mask);
1761 		if (IS_ERR(comp)) {
1762 			pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1763 			       driver, PTR_ERR(comp));
1764 			return PTR_ERR(comp);
1765 		}
1766 
1767 		err = test_comp(comp, desc->suite.comp.comp.vecs,
1768 				desc->suite.comp.decomp.vecs,
1769 				desc->suite.comp.comp.count,
1770 				desc->suite.comp.decomp.count);
1771 
1772 		crypto_free_comp(comp);
1773 	}
1774 	return err;
1775 }
1776 
1777 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1778 			 u32 type, u32 mask)
1779 {
1780 	struct crypto_ahash *tfm;
1781 	int err;
1782 
1783 	tfm = crypto_alloc_ahash(driver, type, mask);
1784 	if (IS_ERR(tfm)) {
1785 		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1786 		       "%ld\n", driver, PTR_ERR(tfm));
1787 		return PTR_ERR(tfm);
1788 	}
1789 
1790 	err = test_hash(tfm, desc->suite.hash.vecs,
1791 			desc->suite.hash.count, true);
1792 	if (!err)
1793 		err = test_hash(tfm, desc->suite.hash.vecs,
1794 				desc->suite.hash.count, false);
1795 
1796 	crypto_free_ahash(tfm);
1797 	return err;
1798 }
1799 
1800 static int alg_test_crc32c(const struct alg_test_desc *desc,
1801 			   const char *driver, u32 type, u32 mask)
1802 {
1803 	struct crypto_shash *tfm;
1804 	u32 val;
1805 	int err;
1806 
1807 	err = alg_test_hash(desc, driver, type, mask);
1808 	if (err)
1809 		goto out;
1810 
1811 	tfm = crypto_alloc_shash(driver, type, mask);
1812 	if (IS_ERR(tfm)) {
1813 		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1814 		       "%ld\n", driver, PTR_ERR(tfm));
1815 		err = PTR_ERR(tfm);
1816 		goto out;
1817 	}
1818 
1819 	do {
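	/*
	 * The check below relies on crc32c's final() step being a plain
	 * bitwise complement of the 32-bit running state: seeding the shash
	 * descriptor context directly with 420553207 and finalizing without
	 * feeding any data must therefore produce ~420553207.  Anything else
	 * means the driver does not keep its running CRC as a bare u32 in
	 * the descriptor context, which callers that seed the context
	 * directly depend on.
	 */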
1820 		SHASH_DESC_ON_STACK(shash, tfm);
1821 		u32 *ctx = (u32 *)shash_desc_ctx(shash);
1822 
1823 		shash->tfm = tfm;
1824 		shash->flags = 0;
1825 
1826 		*ctx = le32_to_cpu(420553207);
1827 		err = crypto_shash_final(shash, (u8 *)&val);
1828 		if (err) {
1829 			printk(KERN_ERR "alg: crc32c: Operation failed for "
1830 			       "%s: %d\n", driver, err);
1831 			break;
1832 		}
1833 
1834 		if (val != ~420553207) {
1835 			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1836 			       "%d\n", driver, val);
1837 			err = -EINVAL;
1838 		}
1839 	} while (0);
1840 
1841 	crypto_free_shash(tfm);
1842 
1843 out:
1844 	return err;
1845 }
1846 
1847 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1848 			  u32 type, u32 mask)
1849 {
1850 	struct crypto_rng *rng;
1851 	int err;
1852 
1853 	rng = crypto_alloc_rng(driver, type, mask);
1854 	if (IS_ERR(rng)) {
1855 		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1856 		       "%ld\n", driver, PTR_ERR(rng));
1857 		return PTR_ERR(rng);
1858 	}
1859 
1860 	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1861 
1862 	crypto_free_rng(rng);
1863 
1864 	return err;
1865 }
1866 
1867 
1868 static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
1869 			  const char *driver, u32 type, u32 mask)
1870 {
1871 	int ret = -EAGAIN;
1872 	struct crypto_rng *drng;
1873 	struct drbg_test_data test_data;
1874 	struct drbg_string addtl, pers, testentropy;
1875 	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1876 
1877 	if (!buf)
1878 		return -ENOMEM;
1879 
1880 	drng = crypto_alloc_rng(driver, type, mask);
1881 	if (IS_ERR(drng)) {
1882 		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1883 		       "%s\n", driver);
1884 		kzfree(buf);
1885 		return -ENOMEM;
1886 	}
1887 
1888 	test_data.testentropy = &testentropy;
1889 	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1890 	drbg_string_fill(&pers, test->pers, test->perslen);
1891 	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1892 	if (ret) {
1893 		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1894 		goto outbuf;
1895 	}
1896 
1897 	drbg_string_fill(&addtl, test->addtla, test->addtllen);
1898 	if (pr) {
1899 		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1900 		ret = crypto_drbg_get_bytes_addtl_test(drng,
1901 			buf, test->expectedlen, &addtl,	&test_data);
1902 	} else {
1903 		ret = crypto_drbg_get_bytes_addtl(drng,
1904 			buf, test->expectedlen, &addtl);
1905 	}
1906 	if (ret < 0) {
1907 		printk(KERN_ERR "alg: drbg: could not obtain random data for "
1908 		       "driver %s\n", driver);
1909 		goto outbuf;
1910 	}
1911 
1912 	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1913 	if (pr) {
1914 		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1915 		ret = crypto_drbg_get_bytes_addtl_test(drng,
1916 			buf, test->expectedlen, &addtl, &test_data);
1917 	} else {
1918 		ret = crypto_drbg_get_bytes_addtl(drng,
1919 			buf, test->expectedlen, &addtl);
1920 	}
1921 	if (ret < 0) {
1922 		printk(KERN_ERR "alg: drbg: could not obtain random data for "
1923 		       "driver %s\n", driver);
1924 		goto outbuf;
1925 	}
1926 
1927 	ret = memcmp(test->expected, buf, test->expectedlen);
1928 
1929 outbuf:
1930 	crypto_free_rng(drng);
1931 	kzfree(buf);
1932 	return ret;
1933 }
1934 
1935 
1936 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1937 			 u32 type, u32 mask)
1938 {
1939 	int err = 0;
1940 	int pr = 0;
1941 	int i = 0;
1942 	const struct drbg_testvec *template = desc->suite.drbg.vecs;
1943 	unsigned int tcount = desc->suite.drbg.count;
1944 
1945 	if (memcmp(driver, "drbg_pr_", 8) == 0)
1946 		pr = 1;
1947 
1948 	for (i = 0; i < tcount; i++) {
1949 		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1950 		if (err) {
1951 			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1952 			       i, driver);
1953 			err = -EINVAL;
1954 			break;
1955 		}
1956 	}
1957 	return err;
1958 
1959 }
1960 
1961 static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
1962 		       const char *alg)
1963 {
1964 	struct kpp_request *req;
1965 	void *input_buf = NULL;
1966 	void *output_buf = NULL;
1967 	void *a_public = NULL;
1968 	void *a_ss = NULL;
1969 	void *shared_secret = NULL;
1970 	struct crypto_wait wait;
1971 	unsigned int out_len_max;
1972 	int err = -ENOMEM;
1973 	struct scatterlist src, dst;
1974 
1975 	req = kpp_request_alloc(tfm, GFP_KERNEL);
1976 	if (!req)
1977 		return err;
1978 
1979 	crypto_init_wait(&wait);
1980 
1981 	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
1982 	if (err < 0)
1983 		goto free_req;
1984 
1985 	out_len_max = crypto_kpp_maxsize(tfm);
1986 	output_buf = kzalloc(out_len_max, GFP_KERNEL);
1987 	if (!output_buf) {
1988 		err = -ENOMEM;
1989 		goto free_req;
1990 	}
1991 
1992 	/* Generate party A's public key; no input is needed for this step. */
1993 	kpp_request_set_input(req, NULL, 0);
1994 	sg_init_one(&dst, output_buf, out_len_max);
1995 	kpp_request_set_output(req, &dst, out_len_max);
1996 	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1997 				 crypto_req_done, &wait);
1998 
1999 	/* Compute party A's public key */
2000 	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
2001 	if (err) {
2002 		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
2003 		       alg, err);
2004 		goto free_output;
2005 	}
2006 
2007 	if (vec->genkey) {
2008 		/* Save party A's public key */
2009 		a_public = kzalloc(out_len_max, GFP_KERNEL);
2010 		if (!a_public) {
2011 			err = -ENOMEM;
2012 			goto free_output;
2013 		}
2014 		memcpy(a_public, sg_virt(req->dst), out_len_max);
2015 	} else {
2016 		/* Verify calculated public key */
2017 		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
2018 			   vec->expected_a_public_size)) {
2019 			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
2020 			       alg);
2021 			err = -EINVAL;
2022 			goto free_output;
2023 		}
2024 	}
2025 
2026 	/* Calculate the shared secret using counterpart B's public key. */
2027 	input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
2028 	if (!input_buf) {
2029 		err = -ENOMEM;
2030 		goto free_output;
2031 	}
2032 
2033 	memcpy(input_buf, vec->b_public, vec->b_public_size);
2034 	sg_init_one(&src, input_buf, vec->b_public_size);
2035 	sg_init_one(&dst, output_buf, out_len_max);
2036 	kpp_request_set_input(req, &src, vec->b_public_size);
2037 	kpp_request_set_output(req, &dst, out_len_max);
2038 	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2039 				 crypto_req_done, &wait);
2040 	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
2041 	if (err) {
2042 		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
2043 		       alg, err);
2044 		goto free_all;
2045 	}
2046 
2047 	if (vec->genkey) {
2048 		/* Save the shared secret obtained by party A */
2049 		a_ss = kzalloc(vec->expected_ss_size, GFP_KERNEL);
2050 		if (!a_ss) {
2051 			err = -ENOMEM;
2052 			goto free_all;
2053 		}
2054 		memcpy(a_ss, sg_virt(req->dst), vec->expected_ss_size);
2055 
2056 		/*
2057 		 * Calculate party B's shared secret by using party A's
2058 		 * public key.
2059 		 */
2060 		err = crypto_kpp_set_secret(tfm, vec->b_secret,
2061 					    vec->b_secret_size);
2062 		if (err < 0)
2063 			goto free_all;
2064 
2065 		sg_init_one(&src, a_public, vec->expected_a_public_size);
2066 		sg_init_one(&dst, output_buf, out_len_max);
2067 		kpp_request_set_input(req, &src, vec->expected_a_public_size);
2068 		kpp_request_set_output(req, &dst, out_len_max);
2069 		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2070 					 crypto_req_done, &wait);
2071 		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
2072 				      &wait);
2073 		if (err) {
2074 			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
2075 			       alg, err);
2076 			goto free_all;
2077 		}
2078 
2079 		shared_secret = a_ss;
2080 	} else {
2081 		shared_secret = (void *)vec->expected_ss;
2082 	}
2083 
2084 	/*
2085 	 * Verify the shared secret, from which the user derives the
2086 	 * actual session key with a hash/KDF of their choosing.
2087 	 */
2088 	if (memcmp(shared_secret, sg_virt(req->dst),
2089 		   vec->expected_ss_size)) {
2090 		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
2091 		       alg);
2092 		err = -EINVAL;
2093 	}
2094 
2095 free_all:
2096 	kfree(a_ss);
2097 	kfree(input_buf);
2098 free_output:
2099 	kfree(a_public);
2100 	kfree(output_buf);
2101 free_req:
2102 	kpp_request_free(req);
2103 	return err;
2104 }
2105 
2106 static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2107 		    const struct kpp_testvec *vecs, unsigned int tcount)
2108 {
2109 	int ret, i;
2110 
2111 	for (i = 0; i < tcount; i++) {
2112 		ret = do_test_kpp(tfm, vecs++, alg);
2113 		if (ret) {
2114 			pr_err("alg: %s: test failed on vector %d, err=%d\n",
2115 			       alg, i + 1, ret);
2116 			return ret;
2117 		}
2118 	}
2119 	return 0;
2120 }
2121 
2122 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2123 			u32 type, u32 mask)
2124 {
2125 	struct crypto_kpp *tfm;
2126 	int err = 0;
2127 
2128 	tfm = crypto_alloc_kpp(driver, type, mask);
2129 	if (IS_ERR(tfm)) {
2130 		pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2131 		       driver, PTR_ERR(tfm));
2132 		return PTR_ERR(tfm);
2133 	}
2134 	if (desc->suite.kpp.vecs)
2135 		err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2136 			       desc->suite.kpp.count);
2137 
2138 	crypto_free_kpp(tfm);
2139 	return err;
2140 }
2141 
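/*
 * Run a single akcipher vector: load the public or private key, encrypt
 * (or sign) the message and compare the result with the expected
 * ciphertext, then, for private-key vectors, decrypt (or verify) it and
 * check that the recovered data matches the original message.
 */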
2142 static int test_akcipher_one(struct crypto_akcipher *tfm,
2143 			     const struct akcipher_testvec *vecs)
2144 {
2145 	char *xbuf[XBUFSIZE];
2146 	struct akcipher_request *req;
2147 	void *outbuf_enc = NULL;
2148 	void *outbuf_dec = NULL;
2149 	struct crypto_wait wait;
2150 	unsigned int out_len_max, out_len = 0;
2151 	int err = -ENOMEM;
2152 	struct scatterlist src, dst, src_tab[2];
2153 
2154 	if (testmgr_alloc_buf(xbuf))
2155 		return err;
2156 
2157 	req = akcipher_request_alloc(tfm, GFP_KERNEL);
2158 	if (!req)
2159 		goto free_xbuf;
2160 
2161 	crypto_init_wait(&wait);
2162 
2163 	if (vecs->public_key_vec)
2164 		err = crypto_akcipher_set_pub_key(tfm, vecs->key,
2165 						  vecs->key_len);
2166 	else
2167 		err = crypto_akcipher_set_priv_key(tfm, vecs->key,
2168 						   vecs->key_len);
2169 	if (err)
2170 		goto free_req;
2171 
2172 	err = -ENOMEM;
2173 	out_len_max = crypto_akcipher_maxsize(tfm);
2174 	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2175 	if (!outbuf_enc)
2176 		goto free_req;
2177 
2178 	if (WARN_ON(vecs->m_size > PAGE_SIZE))
2179 		goto free_all;
2180 
2181 	memcpy(xbuf[0], vecs->m, vecs->m_size);
2182 
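	/*
	 * Spread the message across two scatterlist entries (the first
	 * 8 bytes and the remainder) to exercise non-contiguous input.
	 */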
2183 	sg_init_table(src_tab, 2);
2184 	sg_set_buf(&src_tab[0], xbuf[0], 8);
2185 	sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
2186 	sg_init_one(&dst, outbuf_enc, out_len_max);
2187 	akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
2188 				   out_len_max);
2189 	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2190 				      crypto_req_done, &wait);
2191 
2192 	err = crypto_wait_req(vecs->siggen_sigver_test ?
2193 			      /* Run asymmetric signature generation */
2194 			      crypto_akcipher_sign(req) :
2195 			      /* Run asymmetric encrypt */
2196 			      crypto_akcipher_encrypt(req), &wait);
2197 	if (err) {
2198 		pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
2199 		goto free_all;
2200 	}
2201 	if (req->dst_len != vecs->c_size) {
2202 		pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
2203 		err = -EINVAL;
2204 		goto free_all;
2205 	}
2206 	/* verify that encrypted message is equal to expected */
2207 	if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
2208 		pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
2209 		hexdump(outbuf_enc, vecs->c_size);
2210 		err = -EINVAL;
2211 		goto free_all;
2212 	}
2213 	/* Don't invoke decrypt for vectors with public key */
2214 	if (vecs->public_key_vec) {
2215 		err = 0;
2216 		goto free_all;
2217 	}
2218 	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2219 	if (!outbuf_dec) {
2220 		err = -ENOMEM;
2221 		goto free_all;
2222 	}
2223 
2224 	if (WARN_ON(vecs->c_size > PAGE_SIZE))
2225 		goto free_all;
2226 
2227 	memcpy(xbuf[0], vecs->c, vecs->c_size);
2228 
2229 	sg_init_one(&src, xbuf[0], vecs->c_size);
2230 	sg_init_one(&dst, outbuf_dec, out_len_max);
2231 	crypto_init_wait(&wait);
2232 	akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
2233 
2234 	err = crypto_wait_req(vecs->siggen_sigver_test ?
2235 			      /* Run asymmetric signature verification */
2236 			      crypto_akcipher_verify(req) :
2237 			      /* Run asymmetric decrypt */
2238 			      crypto_akcipher_decrypt(req), &wait);
2239 	if (err) {
2240 		pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
2241 		goto free_all;
2242 	}
2243 	out_len = req->dst_len;
2244 	if (out_len < vecs->m_size) {
2245 		pr_err("alg: akcipher: decrypt test failed. Invalid output len %u\n",
2246 		       out_len);
2247 		err = -EINVAL;
2248 		goto free_all;
2249 	}
2250 	/* verify that decrypted message is equal to the original msg */
2251 	if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
2252 	    memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
2253 		   vecs->m_size)) {
2254 		pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
2255 		hexdump(outbuf_dec, out_len);
2256 		err = -EINVAL;
2257 	}
2258 free_all:
2259 	kfree(outbuf_dec);
2260 	kfree(outbuf_enc);
2261 free_req:
2262 	akcipher_request_free(req);
2263 free_xbuf:
2264 	testmgr_free_buf(xbuf);
2265 	return err;
2266 }
2267 
2268 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2269 			 const struct akcipher_testvec *vecs,
2270 			 unsigned int tcount)
2271 {
2272 	const char *algo =
2273 		crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2274 	int ret, i;
2275 
2276 	for (i = 0; i < tcount; i++) {
2277 		ret = test_akcipher_one(tfm, vecs++);
2278 		if (!ret)
2279 			continue;
2280 
2281 		pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2282 		       i + 1, algo, ret);
2283 		return ret;
2284 	}
2285 	return 0;
2286 }
2287 
2288 static int alg_test_akcipher(const struct alg_test_desc *desc,
2289 			     const char *driver, u32 type, u32 mask)
2290 {
2291 	struct crypto_akcipher *tfm;
2292 	int err = 0;
2293 
2294 	tfm = crypto_alloc_akcipher(driver, type, mask);
2295 	if (IS_ERR(tfm)) {
2296 		pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2297 		       driver, PTR_ERR(tfm));
2298 		return PTR_ERR(tfm);
2299 	}
2300 	if (desc->suite.akcipher.vecs)
2301 		err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2302 				    desc->suite.akcipher.count);
2303 
2304 	crypto_free_akcipher(tfm);
2305 	return err;
2306 }
2307 
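/*
 * Placeholder used for algorithms that are listed below without vectors,
 * e.g. variants already covered by another entry in the table.
 */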
2308 static int alg_test_null(const struct alg_test_desc *desc,
2309 			     const char *driver, u32 type, u32 mask)
2310 {
2311 	return 0;
2312 }
2313 
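/* Initialize a test suite from a vector array and its entry count. */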
2314 #define __VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }
2315 
2316 /* Please keep this list sorted by algorithm name. */
2317 static const struct alg_test_desc alg_test_descs[] = {
2318 	{
2319 		.alg = "ansi_cprng",
2320 		.test = alg_test_cprng,
2321 		.suite = {
2322 			.cprng = __VECS(ansi_cprng_aes_tv_template)
2323 		}
2324 	}, {
2325 		.alg = "authenc(hmac(md5),ecb(cipher_null))",
2326 		.test = alg_test_aead,
2327 		.suite = {
2328 			.aead = {
2329 				.enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template),
2330 				.dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template)
2331 			}
2332 		}
2333 	}, {
2334 		.alg = "authenc(hmac(sha1),cbc(aes))",
2335 		.test = alg_test_aead,
2336 		.fips_allowed = 1,
2337 		.suite = {
2338 			.aead = {
2339 				.enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp)
2340 			}
2341 		}
2342 	}, {
2343 		.alg = "authenc(hmac(sha1),cbc(des))",
2344 		.test = alg_test_aead,
2345 		.suite = {
2346 			.aead = {
2347 				.enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp)
2348 			}
2349 		}
2350 	}, {
2351 		.alg = "authenc(hmac(sha1),cbc(des3_ede))",
2352 		.test = alg_test_aead,
2353 		.fips_allowed = 1,
2354 		.suite = {
2355 			.aead = {
2356 				.enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp)
2357 			}
2358 		}
2359 	}, {
2360 		.alg = "authenc(hmac(sha1),ctr(aes))",
2361 		.test = alg_test_null,
2362 		.fips_allowed = 1,
2363 	}, {
2364 		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
2365 		.test = alg_test_aead,
2366 		.suite = {
2367 			.aead = {
2368 				.enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp),
2369 				.dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp)
2370 			}
2371 		}
2372 	}, {
2373 		.alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2374 		.test = alg_test_null,
2375 		.fips_allowed = 1,
2376 	}, {
2377 		.alg = "authenc(hmac(sha224),cbc(des))",
2378 		.test = alg_test_aead,
2379 		.suite = {
2380 			.aead = {
2381 				.enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp)
2382 			}
2383 		}
2384 	}, {
2385 		.alg = "authenc(hmac(sha224),cbc(des3_ede))",
2386 		.test = alg_test_aead,
2387 		.fips_allowed = 1,
2388 		.suite = {
2389 			.aead = {
2390 				.enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp)
2391 			}
2392 		}
2393 	}, {
2394 		.alg = "authenc(hmac(sha256),cbc(aes))",
2395 		.test = alg_test_aead,
2396 		.fips_allowed = 1,
2397 		.suite = {
2398 			.aead = {
2399 				.enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp)
2400 			}
2401 		}
2402 	}, {
2403 		.alg = "authenc(hmac(sha256),cbc(des))",
2404 		.test = alg_test_aead,
2405 		.suite = {
2406 			.aead = {
2407 				.enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp)
2408 			}
2409 		}
2410 	}, {
2411 		.alg = "authenc(hmac(sha256),cbc(des3_ede))",
2412 		.test = alg_test_aead,
2413 		.fips_allowed = 1,
2414 		.suite = {
2415 			.aead = {
2416 				.enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp)
2417 			}
2418 		}
2419 	}, {
2420 		.alg = "authenc(hmac(sha256),ctr(aes))",
2421 		.test = alg_test_null,
2422 		.fips_allowed = 1,
2423 	}, {
2424 		.alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2425 		.test = alg_test_null,
2426 		.fips_allowed = 1,
2427 	}, {
2428 		.alg = "authenc(hmac(sha384),cbc(des))",
2429 		.test = alg_test_aead,
2430 		.suite = {
2431 			.aead = {
2432 				.enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp)
2433 			}
2434 		}
2435 	}, {
2436 		.alg = "authenc(hmac(sha384),cbc(des3_ede))",
2437 		.test = alg_test_aead,
2438 		.fips_allowed = 1,
2439 		.suite = {
2440 			.aead = {
2441 				.enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp)
2442 			}
2443 		}
2444 	}, {
2445 		.alg = "authenc(hmac(sha384),ctr(aes))",
2446 		.test = alg_test_null,
2447 		.fips_allowed = 1,
2448 	}, {
2449 		.alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2450 		.test = alg_test_null,
2451 		.fips_allowed = 1,
2452 	}, {
2453 		.alg = "authenc(hmac(sha512),cbc(aes))",
2454 		.fips_allowed = 1,
2455 		.test = alg_test_aead,
2456 		.suite = {
2457 			.aead = {
2458 				.enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp)
2459 			}
2460 		}
2461 	}, {
2462 		.alg = "authenc(hmac(sha512),cbc(des))",
2463 		.test = alg_test_aead,
2464 		.suite = {
2465 			.aead = {
2466 				.enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp)
2467 			}
2468 		}
2469 	}, {
2470 		.alg = "authenc(hmac(sha512),cbc(des3_ede))",
2471 		.test = alg_test_aead,
2472 		.fips_allowed = 1,
2473 		.suite = {
2474 			.aead = {
2475 				.enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp)
2476 			}
2477 		}
2478 	}, {
2479 		.alg = "authenc(hmac(sha512),ctr(aes))",
2480 		.test = alg_test_null,
2481 		.fips_allowed = 1,
2482 	}, {
2483 		.alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2484 		.test = alg_test_null,
2485 		.fips_allowed = 1,
2486 	}, {
2487 		.alg = "cbc(aes)",
2488 		.test = alg_test_skcipher,
2489 		.fips_allowed = 1,
2490 		.suite = {
2491 			.cipher = {
2492 				.enc = __VECS(aes_cbc_enc_tv_template),
2493 				.dec = __VECS(aes_cbc_dec_tv_template)
2494 			}
2495 		}
2496 	}, {
2497 		.alg = "cbc(anubis)",
2498 		.test = alg_test_skcipher,
2499 		.suite = {
2500 			.cipher = {
2501 				.enc = __VECS(anubis_cbc_enc_tv_template),
2502 				.dec = __VECS(anubis_cbc_dec_tv_template)
2503 			}
2504 		}
2505 	}, {
2506 		.alg = "cbc(blowfish)",
2507 		.test = alg_test_skcipher,
2508 		.suite = {
2509 			.cipher = {
2510 				.enc = __VECS(bf_cbc_enc_tv_template),
2511 				.dec = __VECS(bf_cbc_dec_tv_template)
2512 			}
2513 		}
2514 	}, {
2515 		.alg = "cbc(camellia)",
2516 		.test = alg_test_skcipher,
2517 		.suite = {
2518 			.cipher = {
2519 				.enc = __VECS(camellia_cbc_enc_tv_template),
2520 				.dec = __VECS(camellia_cbc_dec_tv_template)
2521 			}
2522 		}
2523 	}, {
2524 		.alg = "cbc(cast5)",
2525 		.test = alg_test_skcipher,
2526 		.suite = {
2527 			.cipher = {
2528 				.enc = __VECS(cast5_cbc_enc_tv_template),
2529 				.dec = __VECS(cast5_cbc_dec_tv_template)
2530 			}
2531 		}
2532 	}, {
2533 		.alg = "cbc(cast6)",
2534 		.test = alg_test_skcipher,
2535 		.suite = {
2536 			.cipher = {
2537 				.enc = __VECS(cast6_cbc_enc_tv_template),
2538 				.dec = __VECS(cast6_cbc_dec_tv_template)
2539 			}
2540 		}
2541 	}, {
2542 		.alg = "cbc(des)",
2543 		.test = alg_test_skcipher,
2544 		.suite = {
2545 			.cipher = {
2546 				.enc = __VECS(des_cbc_enc_tv_template),
2547 				.dec = __VECS(des_cbc_dec_tv_template)
2548 			}
2549 		}
2550 	}, {
2551 		.alg = "cbc(des3_ede)",
2552 		.test = alg_test_skcipher,
2553 		.fips_allowed = 1,
2554 		.suite = {
2555 			.cipher = {
2556 				.enc = __VECS(des3_ede_cbc_enc_tv_template),
2557 				.dec = __VECS(des3_ede_cbc_dec_tv_template)
2558 			}
2559 		}
2560 	}, {
2561 		.alg = "cbc(serpent)",
2562 		.test = alg_test_skcipher,
2563 		.suite = {
2564 			.cipher = {
2565 				.enc = __VECS(serpent_cbc_enc_tv_template),
2566 				.dec = __VECS(serpent_cbc_dec_tv_template)
2567 			}
2568 		}
2569 	}, {
2570 		.alg = "cbc(twofish)",
2571 		.test = alg_test_skcipher,
2572 		.suite = {
2573 			.cipher = {
2574 				.enc = __VECS(tf_cbc_enc_tv_template),
2575 				.dec = __VECS(tf_cbc_dec_tv_template)
2576 			}
2577 		}
2578 	}, {
2579 		.alg = "cbcmac(aes)",
2580 		.fips_allowed = 1,
2581 		.test = alg_test_hash,
2582 		.suite = {
2583 			.hash = __VECS(aes_cbcmac_tv_template)
2584 		}
2585 	}, {
2586 		.alg = "ccm(aes)",
2587 		.test = alg_test_aead,
2588 		.fips_allowed = 1,
2589 		.suite = {
2590 			.aead = {
2591 				.enc = __VECS(aes_ccm_enc_tv_template),
2592 				.dec = __VECS(aes_ccm_dec_tv_template)
2593 			}
2594 		}
2595 	}, {
2596 		.alg = "chacha20",
2597 		.test = alg_test_skcipher,
2598 		.suite = {
2599 			.cipher = {
2600 				.enc = __VECS(chacha20_enc_tv_template),
2601 				.dec = __VECS(chacha20_enc_tv_template),
2602 			}
2603 		}
2604 	}, {
2605 		.alg = "cmac(aes)",
2606 		.fips_allowed = 1,
2607 		.test = alg_test_hash,
2608 		.suite = {
2609 			.hash = __VECS(aes_cmac128_tv_template)
2610 		}
2611 	}, {
2612 		.alg = "cmac(des3_ede)",
2613 		.fips_allowed = 1,
2614 		.test = alg_test_hash,
2615 		.suite = {
2616 			.hash = __VECS(des3_ede_cmac64_tv_template)
2617 		}
2618 	}, {
2619 		.alg = "compress_null",
2620 		.test = alg_test_null,
2621 	}, {
2622 		.alg = "crc32",
2623 		.test = alg_test_hash,
2624 		.suite = {
2625 			.hash = __VECS(crc32_tv_template)
2626 		}
2627 	}, {
2628 		.alg = "crc32c",
2629 		.test = alg_test_crc32c,
2630 		.fips_allowed = 1,
2631 		.suite = {
2632 			.hash = __VECS(crc32c_tv_template)
2633 		}
2634 	}, {
2635 		.alg = "crct10dif",
2636 		.test = alg_test_hash,
2637 		.fips_allowed = 1,
2638 		.suite = {
2639 			.hash = __VECS(crct10dif_tv_template)
2640 		}
2641 	}, {
2642 		.alg = "ctr(aes)",
2643 		.test = alg_test_skcipher,
2644 		.fips_allowed = 1,
2645 		.suite = {
2646 			.cipher = {
2647 				.enc = __VECS(aes_ctr_enc_tv_template),
2648 				.dec = __VECS(aes_ctr_dec_tv_template)
2649 			}
2650 		}
2651 	}, {
2652 		.alg = "ctr(blowfish)",
2653 		.test = alg_test_skcipher,
2654 		.suite = {
2655 			.cipher = {
2656 				.enc = __VECS(bf_ctr_enc_tv_template),
2657 				.dec = __VECS(bf_ctr_dec_tv_template)
2658 			}
2659 		}
2660 	}, {
2661 		.alg = "ctr(camellia)",
2662 		.test = alg_test_skcipher,
2663 		.suite = {
2664 			.cipher = {
2665 				.enc = __VECS(camellia_ctr_enc_tv_template),
2666 				.dec = __VECS(camellia_ctr_dec_tv_template)
2667 			}
2668 		}
2669 	}, {
2670 		.alg = "ctr(cast5)",
2671 		.test = alg_test_skcipher,
2672 		.suite = {
2673 			.cipher = {
2674 				.enc = __VECS(cast5_ctr_enc_tv_template),
2675 				.dec = __VECS(cast5_ctr_dec_tv_template)
2676 			}
2677 		}
2678 	}, {
2679 		.alg = "ctr(cast6)",
2680 		.test = alg_test_skcipher,
2681 		.suite = {
2682 			.cipher = {
2683 				.enc = __VECS(cast6_ctr_enc_tv_template),
2684 				.dec = __VECS(cast6_ctr_dec_tv_template)
2685 			}
2686 		}
2687 	}, {
2688 		.alg = "ctr(des)",
2689 		.test = alg_test_skcipher,
2690 		.suite = {
2691 			.cipher = {
2692 				.enc = __VECS(des_ctr_enc_tv_template),
2693 				.dec = __VECS(des_ctr_dec_tv_template)
2694 			}
2695 		}
2696 	}, {
2697 		.alg = "ctr(des3_ede)",
2698 		.test = alg_test_skcipher,
2699 		.fips_allowed = 1,
2700 		.suite = {
2701 			.cipher = {
2702 				.enc = __VECS(des3_ede_ctr_enc_tv_template),
2703 				.dec = __VECS(des3_ede_ctr_dec_tv_template)
2704 			}
2705 		}
2706 	}, {
2707 		.alg = "ctr(serpent)",
2708 		.test = alg_test_skcipher,
2709 		.suite = {
2710 			.cipher = {
2711 				.enc = __VECS(serpent_ctr_enc_tv_template),
2712 				.dec = __VECS(serpent_ctr_dec_tv_template)
2713 			}
2714 		}
2715 	}, {
2716 		.alg = "ctr(twofish)",
2717 		.test = alg_test_skcipher,
2718 		.suite = {
2719 			.cipher = {
2720 				.enc = __VECS(tf_ctr_enc_tv_template),
2721 				.dec = __VECS(tf_ctr_dec_tv_template)
2722 			}
2723 		}
2724 	}, {
2725 		.alg = "cts(cbc(aes))",
2726 		.test = alg_test_skcipher,
2727 		.suite = {
2728 			.cipher = {
2729 				.enc = __VECS(cts_mode_enc_tv_template),
2730 				.dec = __VECS(cts_mode_dec_tv_template)
2731 			}
2732 		}
2733 	}, {
2734 		.alg = "deflate",
2735 		.test = alg_test_comp,
2736 		.fips_allowed = 1,
2737 		.suite = {
2738 			.comp = {
2739 				.comp = __VECS(deflate_comp_tv_template),
2740 				.decomp = __VECS(deflate_decomp_tv_template)
2741 			}
2742 		}
2743 	}, {
2744 		.alg = "dh",
2745 		.test = alg_test_kpp,
2746 		.fips_allowed = 1,
2747 		.suite = {
2748 			.kpp = __VECS(dh_tv_template)
2749 		}
2750 	}, {
2751 		.alg = "digest_null",
2752 		.test = alg_test_null,
2753 	}, {
2754 		.alg = "drbg_nopr_ctr_aes128",
2755 		.test = alg_test_drbg,
2756 		.fips_allowed = 1,
2757 		.suite = {
2758 			.drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
2759 		}
2760 	}, {
2761 		.alg = "drbg_nopr_ctr_aes192",
2762 		.test = alg_test_drbg,
2763 		.fips_allowed = 1,
2764 		.suite = {
2765 			.drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
2766 		}
2767 	}, {
2768 		.alg = "drbg_nopr_ctr_aes256",
2769 		.test = alg_test_drbg,
2770 		.fips_allowed = 1,
2771 		.suite = {
2772 			.drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
2773 		}
2774 	}, {
2775 		/*
2776 		 * There is no need to specifically test the DRBG with every
2777 		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2778 		 */
2779 		.alg = "drbg_nopr_hmac_sha1",
2780 		.fips_allowed = 1,
2781 		.test = alg_test_null,
2782 	}, {
2783 		.alg = "drbg_nopr_hmac_sha256",
2784 		.test = alg_test_drbg,
2785 		.fips_allowed = 1,
2786 		.suite = {
2787 			.drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
2788 		}
2789 	}, {
2790 		/* covered by drbg_nopr_hmac_sha256 test */
2791 		.alg = "drbg_nopr_hmac_sha384",
2792 		.fips_allowed = 1,
2793 		.test = alg_test_null,
2794 	}, {
2795 		.alg = "drbg_nopr_hmac_sha512",
2796 		.test = alg_test_null,
2797 		.fips_allowed = 1,
2798 	}, {
2799 		.alg = "drbg_nopr_sha1",
2800 		.fips_allowed = 1,
2801 		.test = alg_test_null,
2802 	}, {
2803 		.alg = "drbg_nopr_sha256",
2804 		.test = alg_test_drbg,
2805 		.fips_allowed = 1,
2806 		.suite = {
2807 			.drbg = __VECS(drbg_nopr_sha256_tv_template)
2808 		}
2809 	}, {
2810 		/* covered by drbg_nopr_sha256 test */
2811 		.alg = "drbg_nopr_sha384",
2812 		.fips_allowed = 1,
2813 		.test = alg_test_null,
2814 	}, {
2815 		.alg = "drbg_nopr_sha512",
2816 		.fips_allowed = 1,
2817 		.test = alg_test_null,
2818 	}, {
2819 		.alg = "drbg_pr_ctr_aes128",
2820 		.test = alg_test_drbg,
2821 		.fips_allowed = 1,
2822 		.suite = {
2823 			.drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
2824 		}
2825 	}, {
2826 		/* covered by drbg_pr_ctr_aes128 test */
2827 		.alg = "drbg_pr_ctr_aes192",
2828 		.fips_allowed = 1,
2829 		.test = alg_test_null,
2830 	}, {
2831 		.alg = "drbg_pr_ctr_aes256",
2832 		.fips_allowed = 1,
2833 		.test = alg_test_null,
2834 	}, {
2835 		.alg = "drbg_pr_hmac_sha1",
2836 		.fips_allowed = 1,
2837 		.test = alg_test_null,
2838 	}, {
2839 		.alg = "drbg_pr_hmac_sha256",
2840 		.test = alg_test_drbg,
2841 		.fips_allowed = 1,
2842 		.suite = {
2843 			.drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
2844 		}
2845 	}, {
2846 		/* covered by drbg_pr_hmac_sha256 test */
2847 		.alg = "drbg_pr_hmac_sha384",
2848 		.fips_allowed = 1,
2849 		.test = alg_test_null,
2850 	}, {
2851 		.alg = "drbg_pr_hmac_sha512",
2852 		.test = alg_test_null,
2853 		.fips_allowed = 1,
2854 	}, {
2855 		.alg = "drbg_pr_sha1",
2856 		.fips_allowed = 1,
2857 		.test = alg_test_null,
2858 	}, {
2859 		.alg = "drbg_pr_sha256",
2860 		.test = alg_test_drbg,
2861 		.fips_allowed = 1,
2862 		.suite = {
2863 			.drbg = __VECS(drbg_pr_sha256_tv_template)
2864 		}
2865 	}, {
2866 		/* covered by drbg_pr_sha256 test */
2867 		.alg = "drbg_pr_sha384",
2868 		.fips_allowed = 1,
2869 		.test = alg_test_null,
2870 	}, {
2871 		.alg = "drbg_pr_sha512",
2872 		.fips_allowed = 1,
2873 		.test = alg_test_null,
2874 	}, {
2875 		.alg = "ecb(aes)",
2876 		.test = alg_test_skcipher,
2877 		.fips_allowed = 1,
2878 		.suite = {
2879 			.cipher = {
2880 				.enc = __VECS(aes_enc_tv_template),
2881 				.dec = __VECS(aes_dec_tv_template)
2882 			}
2883 		}
2884 	}, {
2885 		.alg = "ecb(anubis)",
2886 		.test = alg_test_skcipher,
2887 		.suite = {
2888 			.cipher = {
2889 				.enc = __VECS(anubis_enc_tv_template),
2890 				.dec = __VECS(anubis_dec_tv_template)
2891 			}
2892 		}
2893 	}, {
2894 		.alg = "ecb(arc4)",
2895 		.test = alg_test_skcipher,
2896 		.suite = {
2897 			.cipher = {
2898 				.enc = __VECS(arc4_enc_tv_template),
2899 				.dec = __VECS(arc4_dec_tv_template)
2900 			}
2901 		}
2902 	}, {
2903 		.alg = "ecb(blowfish)",
2904 		.test = alg_test_skcipher,
2905 		.suite = {
2906 			.cipher = {
2907 				.enc = __VECS(bf_enc_tv_template),
2908 				.dec = __VECS(bf_dec_tv_template)
2909 			}
2910 		}
2911 	}, {
2912 		.alg = "ecb(camellia)",
2913 		.test = alg_test_skcipher,
2914 		.suite = {
2915 			.cipher = {
2916 				.enc = __VECS(camellia_enc_tv_template),
2917 				.dec = __VECS(camellia_dec_tv_template)
2918 			}
2919 		}
2920 	}, {
2921 		.alg = "ecb(cast5)",
2922 		.test = alg_test_skcipher,
2923 		.suite = {
2924 			.cipher = {
2925 				.enc = __VECS(cast5_enc_tv_template),
2926 				.dec = __VECS(cast5_dec_tv_template)
2927 			}
2928 		}
2929 	}, {
2930 		.alg = "ecb(cast6)",
2931 		.test = alg_test_skcipher,
2932 		.suite = {
2933 			.cipher = {
2934 				.enc = __VECS(cast6_enc_tv_template),
2935 				.dec = __VECS(cast6_dec_tv_template)
2936 			}
2937 		}
2938 	}, {
2939 		.alg = "ecb(cipher_null)",
2940 		.test = alg_test_null,
2941 		.fips_allowed = 1,
2942 	}, {
2943 		.alg = "ecb(des)",
2944 		.test = alg_test_skcipher,
2945 		.suite = {
2946 			.cipher = {
2947 				.enc = __VECS(des_enc_tv_template),
2948 				.dec = __VECS(des_dec_tv_template)
2949 			}
2950 		}
2951 	}, {
2952 		.alg = "ecb(des3_ede)",
2953 		.test = alg_test_skcipher,
2954 		.fips_allowed = 1,
2955 		.suite = {
2956 			.cipher = {
2957 				.enc = __VECS(des3_ede_enc_tv_template),
2958 				.dec = __VECS(des3_ede_dec_tv_template)
2959 			}
2960 		}
2961 	}, {
2962 		.alg = "ecb(fcrypt)",
2963 		.test = alg_test_skcipher,
2964 		.suite = {
2965 			.cipher = {
2966 				.enc = {
2967 					.vecs = fcrypt_pcbc_enc_tv_template,
2968 					.count = 1
2969 				},
2970 				.dec = {
2971 					.vecs = fcrypt_pcbc_dec_tv_template,
2972 					.count = 1
2973 				}
2974 			}
2975 		}
2976 	}, {
2977 		.alg = "ecb(khazad)",
2978 		.test = alg_test_skcipher,
2979 		.suite = {
2980 			.cipher = {
2981 				.enc = __VECS(khazad_enc_tv_template),
2982 				.dec = __VECS(khazad_dec_tv_template)
2983 			}
2984 		}
2985 	}, {
2986 		.alg = "ecb(seed)",
2987 		.test = alg_test_skcipher,
2988 		.suite = {
2989 			.cipher = {
2990 				.enc = __VECS(seed_enc_tv_template),
2991 				.dec = __VECS(seed_dec_tv_template)
2992 			}
2993 		}
2994 	}, {
2995 		.alg = "ecb(serpent)",
2996 		.test = alg_test_skcipher,
2997 		.suite = {
2998 			.cipher = {
2999 				.enc = __VECS(serpent_enc_tv_template),
3000 				.dec = __VECS(serpent_dec_tv_template)
3001 			}
3002 		}
3003 	}, {
3004 		.alg = "ecb(sm4)",
3005 		.test = alg_test_skcipher,
3006 		.suite = {
3007 			.cipher = {
3008 				.enc = __VECS(sm4_enc_tv_template),
3009 				.dec = __VECS(sm4_dec_tv_template)
3010 			}
3011 		}
3012 	}, {
3013 		.alg = "ecb(speck128)",
3014 		.test = alg_test_skcipher,
3015 		.suite = {
3016 			.cipher = {
3017 				.enc = __VECS(speck128_enc_tv_template),
3018 				.dec = __VECS(speck128_dec_tv_template)
3019 			}
3020 		}
3021 	}, {
3022 		.alg = "ecb(speck64)",
3023 		.test = alg_test_skcipher,
3024 		.suite = {
3025 			.cipher = {
3026 				.enc = __VECS(speck64_enc_tv_template),
3027 				.dec = __VECS(speck64_dec_tv_template)
3028 			}
3029 		}
3030 	}, {
3031 		.alg = "ecb(tea)",
3032 		.test = alg_test_skcipher,
3033 		.suite = {
3034 			.cipher = {
3035 				.enc = __VECS(tea_enc_tv_template),
3036 				.dec = __VECS(tea_dec_tv_template)
3037 			}
3038 		}
3039 	}, {
3040 		.alg = "ecb(tnepres)",
3041 		.test = alg_test_skcipher,
3042 		.suite = {
3043 			.cipher = {
3044 				.enc = __VECS(tnepres_enc_tv_template),
3045 				.dec = __VECS(tnepres_dec_tv_template)
3046 			}
3047 		}
3048 	}, {
3049 		.alg = "ecb(twofish)",
3050 		.test = alg_test_skcipher,
3051 		.suite = {
3052 			.cipher = {
3053 				.enc = __VECS(tf_enc_tv_template),
3054 				.dec = __VECS(tf_dec_tv_template)
3055 			}
3056 		}
3057 	}, {
3058 		.alg = "ecb(xeta)",
3059 		.test = alg_test_skcipher,
3060 		.suite = {
3061 			.cipher = {
3062 				.enc = __VECS(xeta_enc_tv_template),
3063 				.dec = __VECS(xeta_dec_tv_template)
3064 			}
3065 		}
3066 	}, {
3067 		.alg = "ecb(xtea)",
3068 		.test = alg_test_skcipher,
3069 		.suite = {
3070 			.cipher = {
3071 				.enc = __VECS(xtea_enc_tv_template),
3072 				.dec = __VECS(xtea_dec_tv_template)
3073 			}
3074 		}
3075 	}, {
3076 		.alg = "ecdh",
3077 		.test = alg_test_kpp,
3078 		.fips_allowed = 1,
3079 		.suite = {
3080 			.kpp = __VECS(ecdh_tv_template)
3081 		}
3082 	}, {
3083 		.alg = "gcm(aes)",
3084 		.test = alg_test_aead,
3085 		.fips_allowed = 1,
3086 		.suite = {
3087 			.aead = {
3088 				.enc = __VECS(aes_gcm_enc_tv_template),
3089 				.dec = __VECS(aes_gcm_dec_tv_template)
3090 			}
3091 		}
3092 	}, {
3093 		.alg = "ghash",
3094 		.test = alg_test_hash,
3095 		.fips_allowed = 1,
3096 		.suite = {
3097 			.hash = __VECS(ghash_tv_template)
3098 		}
3099 	}, {
3100 		.alg = "hmac(crc32)",
3101 		.test = alg_test_hash,
3102 		.suite = {
3103 			.hash = __VECS(bfin_crc_tv_template)
3104 		}
3105 	}, {
3106 		.alg = "hmac(md5)",
3107 		.test = alg_test_hash,
3108 		.suite = {
3109 			.hash = __VECS(hmac_md5_tv_template)
3110 		}
3111 	}, {
3112 		.alg = "hmac(rmd128)",
3113 		.test = alg_test_hash,
3114 		.suite = {
3115 			.hash = __VECS(hmac_rmd128_tv_template)
3116 		}
3117 	}, {
3118 		.alg = "hmac(rmd160)",
3119 		.test = alg_test_hash,
3120 		.suite = {
3121 			.hash = __VECS(hmac_rmd160_tv_template)
3122 		}
3123 	}, {
3124 		.alg = "hmac(sha1)",
3125 		.test = alg_test_hash,
3126 		.fips_allowed = 1,
3127 		.suite = {
3128 			.hash = __VECS(hmac_sha1_tv_template)
3129 		}
3130 	}, {
3131 		.alg = "hmac(sha224)",
3132 		.test = alg_test_hash,
3133 		.fips_allowed = 1,
3134 		.suite = {
3135 			.hash = __VECS(hmac_sha224_tv_template)
3136 		}
3137 	}, {
3138 		.alg = "hmac(sha256)",
3139 		.test = alg_test_hash,
3140 		.fips_allowed = 1,
3141 		.suite = {
3142 			.hash = __VECS(hmac_sha256_tv_template)
3143 		}
3144 	}, {
3145 		.alg = "hmac(sha3-224)",
3146 		.test = alg_test_hash,
3147 		.fips_allowed = 1,
3148 		.suite = {
3149 			.hash = __VECS(hmac_sha3_224_tv_template)
3150 		}
3151 	}, {
3152 		.alg = "hmac(sha3-256)",
3153 		.test = alg_test_hash,
3154 		.fips_allowed = 1,
3155 		.suite = {
3156 			.hash = __VECS(hmac_sha3_256_tv_template)
3157 		}
3158 	}, {
3159 		.alg = "hmac(sha3-384)",
3160 		.test = alg_test_hash,
3161 		.fips_allowed = 1,
3162 		.suite = {
3163 			.hash = __VECS(hmac_sha3_384_tv_template)
3164 		}
3165 	}, {
3166 		.alg = "hmac(sha3-512)",
3167 		.test = alg_test_hash,
3168 		.fips_allowed = 1,
3169 		.suite = {
3170 			.hash = __VECS(hmac_sha3_512_tv_template)
3171 		}
3172 	}, {
3173 		.alg = "hmac(sha384)",
3174 		.test = alg_test_hash,
3175 		.fips_allowed = 1,
3176 		.suite = {
3177 			.hash = __VECS(hmac_sha384_tv_template)
3178 		}
3179 	}, {
3180 		.alg = "hmac(sha512)",
3181 		.test = alg_test_hash,
3182 		.fips_allowed = 1,
3183 		.suite = {
3184 			.hash = __VECS(hmac_sha512_tv_template)
3185 		}
3186 	}, {
3187 		.alg = "jitterentropy_rng",
3188 		.fips_allowed = 1,
3189 		.test = alg_test_null,
3190 	}, {
3191 		.alg = "kw(aes)",
3192 		.test = alg_test_skcipher,
3193 		.fips_allowed = 1,
3194 		.suite = {
3195 			.cipher = {
3196 				.enc = __VECS(aes_kw_enc_tv_template),
3197 				.dec = __VECS(aes_kw_dec_tv_template)
3198 			}
3199 		}
3200 	}, {
3201 		.alg = "lrw(aes)",
3202 		.test = alg_test_skcipher,
3203 		.suite = {
3204 			.cipher = {
3205 				.enc = __VECS(aes_lrw_enc_tv_template),
3206 				.dec = __VECS(aes_lrw_dec_tv_template)
3207 			}
3208 		}
3209 	}, {
3210 		.alg = "lrw(camellia)",
3211 		.test = alg_test_skcipher,
3212 		.suite = {
3213 			.cipher = {
3214 				.enc = __VECS(camellia_lrw_enc_tv_template),
3215 				.dec = __VECS(camellia_lrw_dec_tv_template)
3216 			}
3217 		}
3218 	}, {
3219 		.alg = "lrw(cast6)",
3220 		.test = alg_test_skcipher,
3221 		.suite = {
3222 			.cipher = {
3223 				.enc = __VECS(cast6_lrw_enc_tv_template),
3224 				.dec = __VECS(cast6_lrw_dec_tv_template)
3225 			}
3226 		}
3227 	}, {
3228 		.alg = "lrw(serpent)",
3229 		.test = alg_test_skcipher,
3230 		.suite = {
3231 			.cipher = {
3232 				.enc = __VECS(serpent_lrw_enc_tv_template),
3233 				.dec = __VECS(serpent_lrw_dec_tv_template)
3234 			}
3235 		}
3236 	}, {
3237 		.alg = "lrw(twofish)",
3238 		.test = alg_test_skcipher,
3239 		.suite = {
3240 			.cipher = {
3241 				.enc = __VECS(tf_lrw_enc_tv_template),
3242 				.dec = __VECS(tf_lrw_dec_tv_template)
3243 			}
3244 		}
3245 	}, {
3246 		.alg = "lz4",
3247 		.test = alg_test_comp,
3248 		.fips_allowed = 1,
3249 		.suite = {
3250 			.comp = {
3251 				.comp = __VECS(lz4_comp_tv_template),
3252 				.decomp = __VECS(lz4_decomp_tv_template)
3253 			}
3254 		}
3255 	}, {
3256 		.alg = "lz4hc",
3257 		.test = alg_test_comp,
3258 		.fips_allowed = 1,
3259 		.suite = {
3260 			.comp = {
3261 				.comp = __VECS(lz4hc_comp_tv_template),
3262 				.decomp = __VECS(lz4hc_decomp_tv_template)
3263 			}
3264 		}
3265 	}, {
3266 		.alg = "lzo",
3267 		.test = alg_test_comp,
3268 		.fips_allowed = 1,
3269 		.suite = {
3270 			.comp = {
3271 				.comp = __VECS(lzo_comp_tv_template),
3272 				.decomp = __VECS(lzo_decomp_tv_template)
3273 			}
3274 		}
3275 	}, {
3276 		.alg = "md4",
3277 		.test = alg_test_hash,
3278 		.suite = {
3279 			.hash = __VECS(md4_tv_template)
3280 		}
3281 	}, {
3282 		.alg = "md5",
3283 		.test = alg_test_hash,
3284 		.suite = {
3285 			.hash = __VECS(md5_tv_template)
3286 		}
3287 	}, {
3288 		.alg = "michael_mic",
3289 		.test = alg_test_hash,
3290 		.suite = {
3291 			.hash = __VECS(michael_mic_tv_template)
3292 		}
3293 	}, {
3294 		.alg = "ofb(aes)",
3295 		.test = alg_test_skcipher,
3296 		.fips_allowed = 1,
3297 		.suite = {
3298 			.cipher = {
3299 				.enc = __VECS(aes_ofb_enc_tv_template),
3300 				.dec = __VECS(aes_ofb_dec_tv_template)
3301 			}
3302 		}
3303 	}, {
3304 		.alg = "pcbc(fcrypt)",
3305 		.test = alg_test_skcipher,
3306 		.suite = {
3307 			.cipher = {
3308 				.enc = __VECS(fcrypt_pcbc_enc_tv_template),
3309 				.dec = __VECS(fcrypt_pcbc_dec_tv_template)
3310 			}
3311 		}
3312 	}, {
3313 		.alg = "pkcs1pad(rsa,sha224)",
3314 		.test = alg_test_null,
3315 		.fips_allowed = 1,
3316 	}, {
3317 		.alg = "pkcs1pad(rsa,sha256)",
3318 		.test = alg_test_akcipher,
3319 		.fips_allowed = 1,
3320 		.suite = {
3321 			.akcipher = __VECS(pkcs1pad_rsa_tv_template)
3322 		}
3323 	}, {
3324 		.alg = "pkcs1pad(rsa,sha384)",
3325 		.test = alg_test_null,
3326 		.fips_allowed = 1,
3327 	}, {
3328 		.alg = "pkcs1pad(rsa,sha512)",
3329 		.test = alg_test_null,
3330 		.fips_allowed = 1,
3331 	}, {
3332 		.alg = "poly1305",
3333 		.test = alg_test_hash,
3334 		.suite = {
3335 			.hash = __VECS(poly1305_tv_template)
3336 		}
3337 	}, {
3338 		.alg = "rfc3686(ctr(aes))",
3339 		.test = alg_test_skcipher,
3340 		.fips_allowed = 1,
3341 		.suite = {
3342 			.cipher = {
3343 				.enc = __VECS(aes_ctr_rfc3686_enc_tv_template),
3344 				.dec = __VECS(aes_ctr_rfc3686_dec_tv_template)
3345 			}
3346 		}
3347 	}, {
3348 		.alg = "rfc4106(gcm(aes))",
3349 		.test = alg_test_aead,
3350 		.fips_allowed = 1,
3351 		.suite = {
3352 			.aead = {
3353 				.enc = __VECS(aes_gcm_rfc4106_enc_tv_template),
3354 				.dec = __VECS(aes_gcm_rfc4106_dec_tv_template)
3355 			}
3356 		}
3357 	}, {
3358 		.alg = "rfc4309(ccm(aes))",
3359 		.test = alg_test_aead,
3360 		.fips_allowed = 1,
3361 		.suite = {
3362 			.aead = {
3363 				.enc = __VECS(aes_ccm_rfc4309_enc_tv_template),
3364 				.dec = __VECS(aes_ccm_rfc4309_dec_tv_template)
3365 			}
3366 		}
3367 	}, {
3368 		.alg = "rfc4543(gcm(aes))",
3369 		.test = alg_test_aead,
3370 		.suite = {
3371 			.aead = {
3372 				.enc = __VECS(aes_gcm_rfc4543_enc_tv_template),
3373 				.dec = __VECS(aes_gcm_rfc4543_dec_tv_template),
3374 			}
3375 		}
3376 	}, {
3377 		.alg = "rfc7539(chacha20,poly1305)",
3378 		.test = alg_test_aead,
3379 		.suite = {
3380 			.aead = {
3381 				.enc = __VECS(rfc7539_enc_tv_template),
3382 				.dec = __VECS(rfc7539_dec_tv_template),
3383 			}
3384 		}
3385 	}, {
3386 		.alg = "rfc7539esp(chacha20,poly1305)",
3387 		.test = alg_test_aead,
3388 		.suite = {
3389 			.aead = {
3390 				.enc = __VECS(rfc7539esp_enc_tv_template),
3391 				.dec = __VECS(rfc7539esp_dec_tv_template),
3392 			}
3393 		}
3394 	}, {
3395 		.alg = "rmd128",
3396 		.test = alg_test_hash,
3397 		.suite = {
3398 			.hash = __VECS(rmd128_tv_template)
3399 		}
3400 	}, {
3401 		.alg = "rmd160",
3402 		.test = alg_test_hash,
3403 		.suite = {
3404 			.hash = __VECS(rmd160_tv_template)
3405 		}
3406 	}, {
3407 		.alg = "rmd256",
3408 		.test = alg_test_hash,
3409 		.suite = {
3410 			.hash = __VECS(rmd256_tv_template)
3411 		}
3412 	}, {
3413 		.alg = "rmd320",
3414 		.test = alg_test_hash,
3415 		.suite = {
3416 			.hash = __VECS(rmd320_tv_template)
3417 		}
3418 	}, {
3419 		.alg = "rsa",
3420 		.test = alg_test_akcipher,
3421 		.fips_allowed = 1,
3422 		.suite = {
3423 			.akcipher = __VECS(rsa_tv_template)
3424 		}
3425 	}, {
3426 		.alg = "salsa20",
3427 		.test = alg_test_skcipher,
3428 		.suite = {
3429 			.cipher = {
3430 				.enc = __VECS(salsa20_stream_enc_tv_template)
3431 			}
3432 		}
3433 	}, {
3434 		.alg = "sha1",
3435 		.test = alg_test_hash,
3436 		.fips_allowed = 1,
3437 		.suite = {
3438 			.hash = __VECS(sha1_tv_template)
3439 		}
3440 	}, {
3441 		.alg = "sha224",
3442 		.test = alg_test_hash,
3443 		.fips_allowed = 1,
3444 		.suite = {
3445 			.hash = __VECS(sha224_tv_template)
3446 		}
3447 	}, {
3448 		.alg = "sha256",
3449 		.test = alg_test_hash,
3450 		.fips_allowed = 1,
3451 		.suite = {
3452 			.hash = __VECS(sha256_tv_template)
3453 		}
3454 	}, {
3455 		.alg = "sha3-224",
3456 		.test = alg_test_hash,
3457 		.fips_allowed = 1,
3458 		.suite = {
3459 			.hash = __VECS(sha3_224_tv_template)
3460 		}
3461 	}, {
3462 		.alg = "sha3-256",
3463 		.test = alg_test_hash,
3464 		.fips_allowed = 1,
3465 		.suite = {
3466 			.hash = __VECS(sha3_256_tv_template)
3467 		}
3468 	}, {
3469 		.alg = "sha3-384",
3470 		.test = alg_test_hash,
3471 		.fips_allowed = 1,
3472 		.suite = {
3473 			.hash = __VECS(sha3_384_tv_template)
3474 		}
3475 	}, {
3476 		.alg = "sha3-512",
3477 		.test = alg_test_hash,
3478 		.fips_allowed = 1,
3479 		.suite = {
3480 			.hash = __VECS(sha3_512_tv_template)
3481 		}
3482 	}, {
3483 		.alg = "sha384",
3484 		.test = alg_test_hash,
3485 		.fips_allowed = 1,
3486 		.suite = {
3487 			.hash = __VECS(sha384_tv_template)
3488 		}
3489 	}, {
3490 		.alg = "sha512",
3491 		.test = alg_test_hash,
3492 		.fips_allowed = 1,
3493 		.suite = {
3494 			.hash = __VECS(sha512_tv_template)
3495 		}
3496 	}, {
3497 		.alg = "sm3",
3498 		.test = alg_test_hash,
3499 		.suite = {
3500 			.hash = __VECS(sm3_tv_template)
3501 		}
3502 	}, {
3503 		.alg = "tgr128",
3504 		.test = alg_test_hash,
3505 		.suite = {
3506 			.hash = __VECS(tgr128_tv_template)
3507 		}
3508 	}, {
3509 		.alg = "tgr160",
3510 		.test = alg_test_hash,
3511 		.suite = {
3512 			.hash = __VECS(tgr160_tv_template)
3513 		}
3514 	}, {
3515 		.alg = "tgr192",
3516 		.test = alg_test_hash,
3517 		.suite = {
3518 			.hash = __VECS(tgr192_tv_template)
3519 		}
3520 	}, {
3521 		.alg = "vmac(aes)",
3522 		.test = alg_test_hash,
3523 		.suite = {
3524 			.hash = __VECS(aes_vmac128_tv_template)
3525 		}
3526 	}, {
3527 		.alg = "wp256",
3528 		.test = alg_test_hash,
3529 		.suite = {
3530 			.hash = __VECS(wp256_tv_template)
3531 		}
3532 	}, {
3533 		.alg = "wp384",
3534 		.test = alg_test_hash,
3535 		.suite = {
3536 			.hash = __VECS(wp384_tv_template)
3537 		}
3538 	}, {
3539 		.alg = "wp512",
3540 		.test = alg_test_hash,
3541 		.suite = {
3542 			.hash = __VECS(wp512_tv_template)
3543 		}
3544 	}, {
3545 		.alg = "xcbc(aes)",
3546 		.test = alg_test_hash,
3547 		.suite = {
3548 			.hash = __VECS(aes_xcbc128_tv_template)
3549 		}
3550 	}, {
3551 		.alg = "xts(aes)",
3552 		.test = alg_test_skcipher,
3553 		.fips_allowed = 1,
3554 		.suite = {
3555 			.cipher = {
3556 				.enc = __VECS(aes_xts_enc_tv_template),
3557 				.dec = __VECS(aes_xts_dec_tv_template)
3558 			}
3559 		}
3560 	}, {
3561 		.alg = "xts(camellia)",
3562 		.test = alg_test_skcipher,
3563 		.suite = {
3564 			.cipher = {
3565 				.enc = __VECS(camellia_xts_enc_tv_template),
3566 				.dec = __VECS(camellia_xts_dec_tv_template)
3567 			}
3568 		}
3569 	}, {
3570 		.alg = "xts(cast6)",
3571 		.test = alg_test_skcipher,
3572 		.suite = {
3573 			.cipher = {
3574 				.enc = __VECS(cast6_xts_enc_tv_template),
3575 				.dec = __VECS(cast6_xts_dec_tv_template)
3576 			}
3577 		}
3578 	}, {
3579 		.alg = "xts(serpent)",
3580 		.test = alg_test_skcipher,
3581 		.suite = {
3582 			.cipher = {
3583 				.enc = __VECS(serpent_xts_enc_tv_template),
3584 				.dec = __VECS(serpent_xts_dec_tv_template)
3585 			}
3586 		}
3587 	}, {
3588 		.alg = "xts(speck128)",
3589 		.test = alg_test_skcipher,
3590 		.suite = {
3591 			.cipher = {
3592 				.enc = __VECS(speck128_xts_enc_tv_template),
3593 				.dec = __VECS(speck128_xts_dec_tv_template)
3594 			}
3595 		}
3596 	}, {
3597 		.alg = "xts(speck64)",
3598 		.test = alg_test_skcipher,
3599 		.suite = {
3600 			.cipher = {
3601 				.enc = __VECS(speck64_xts_enc_tv_template),
3602 				.dec = __VECS(speck64_xts_dec_tv_template)
3603 			}
3604 		}
3605 	}, {
3606 		.alg = "xts(twofish)",
3607 		.test = alg_test_skcipher,
3608 		.suite = {
3609 			.cipher = {
3610 				.enc = __VECS(tf_xts_enc_tv_template),
3611 				.dec = __VECS(tf_xts_dec_tv_template)
3612 			}
3613 		}
3614 	}, {
3615 		.alg = "zlib-deflate",
3616 		.test = alg_test_comp,
3617 		.fips_allowed = 1,
3618 		.suite = {
3619 			.comp = {
3620 				.comp = __VECS(zlib_deflate_comp_tv_template),
3621 				.decomp = __VECS(zlib_deflate_decomp_tv_template)
3622 			}
3623 		}
3624 	}
3625 };
3626 
3627 static bool alg_test_descs_checked;
3628 
3629 static void alg_test_descs_check_order(void)
3630 {
3631 	int i;
3632 
3633 	/* only check once */
3634 	if (alg_test_descs_checked)
3635 		return;
3636 
3637 	alg_test_descs_checked = true;
3638 
3639 	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3640 		int diff = strcmp(alg_test_descs[i - 1].alg,
3641 				  alg_test_descs[i].alg);
3642 
3643 		if (WARN_ON(diff > 0)) {
3644 			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3645 				alg_test_descs[i - 1].alg,
3646 				alg_test_descs[i].alg);
3647 		}
3648 
3649 		if (WARN_ON(diff == 0)) {
3650 			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3651 				alg_test_descs[i].alg);
3652 		}
3653 	}
3654 }
3655 
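/*
 * Binary search for @alg in the alphabetically sorted alg_test_descs
 * table.  Returns the index of the matching entry or -1 if none exists.
 */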
3656 static int alg_find_test(const char *alg)
3657 {
3658 	int start = 0;
3659 	int end = ARRAY_SIZE(alg_test_descs);
3660 
3661 	while (start < end) {
3662 		int i = (start + end) / 2;
3663 		int diff = strcmp(alg_test_descs[i].alg, alg);
3664 
3665 		if (diff > 0) {
3666 			end = i;
3667 			continue;
3668 		}
3669 
3670 		if (diff < 0) {
3671 			start = i + 1;
3672 			continue;
3673 		}
3674 
3675 		return i;
3676 	}
3677 
3678 	return -1;
3679 }
3680 
3681 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3682 {
3683 	int i;
3684 	int j;
3685 	int rc;
3686 
3687 	if (!fips_enabled && notests) {
3688 		printk_once(KERN_INFO "alg: self-tests disabled\n");
3689 		return 0;
3690 	}
3691 
3692 	alg_test_descs_check_order();
3693 
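	/*
	 * Single block ciphers are tested through their "ecb(<alg>)"
	 * wrapper, so rewrite the name before looking it up.
	 */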
3694 	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3695 		char nalg[CRYPTO_MAX_ALG_NAME];
3696 
3697 		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3698 		    sizeof(nalg))
3699 			return -ENAMETOOLONG;
3700 
3701 		i = alg_find_test(nalg);
3702 		if (i < 0)
3703 			goto notest;
3704 
3705 		if (fips_enabled && !alg_test_descs[i].fips_allowed)
3706 			goto non_fips_alg;
3707 
3708 		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3709 		goto test_done;
3710 	}
3711 
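	/*
	 * Look up tests under both the algorithm name and the driver name
	 * and run whichever entries exist (only once if both resolve to
	 * the same descriptor).
	 */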
3712 	i = alg_find_test(alg);
3713 	j = alg_find_test(driver);
3714 	if (i < 0 && j < 0)
3715 		goto notest;
3716 
3717 	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3718 			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
3719 		goto non_fips_alg;
3720 
3721 	rc = 0;
3722 	if (i >= 0)
3723 		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3724 					     type, mask);
3725 	if (j >= 0 && j != i)
3726 		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3727 					     type, mask);
3728 
3729 test_done:
3730 	if (fips_enabled && rc)
3731 		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3732 
3733 	if (fips_enabled && !rc)
3734 		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3735 
3736 	return rc;
3737 
3738 notest:
3739 	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3740 	return 0;
3741 non_fips_alg:
3742 	return -EINVAL;
3743 }
3744 
3745 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3746 
3747 EXPORT_SYMBOL_GPL(alg_test);
3748