xref: /openbmc/linux/drivers/crypto/caam/caamalg.c (revision 752beb5e)
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * caam - Freescale FSL CAAM support for crypto API
4  *
5  * Copyright 2008-2011 Freescale Semiconductor, Inc.
6  * Copyright 2016-2019 NXP
7  *
8  * Based on talitos crypto API driver.
9  *
10  * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11  *
12  * ---------------                     ---------------
13  * | JobDesc #1  |-------------------->|  ShareDesc  |
14  * | *(packet 1) |                     |   (PDB)     |
15  * ---------------      |------------->|  (hashKey)  |
16  *       .              |              | (cipherKey) |
17  *       .              |    |-------->| (operation) |
18  * ---------------      |    |         ---------------
19  * | JobDesc #2  |------|    |
20  * | *(packet 2) |           |
21  * ---------------           |
22  *       .                   |
23  *       .                   |
24  * ---------------           |
25  * | JobDesc #3  |------------
26  * | *(packet 3) |
27  * ---------------
28  *
29  * The SharedDesc never changes for a connection unless rekeyed, but
30  * each packet will likely be in a different place. So all we need
31  * to know to process the packet is where the input is, where the
32  * output goes, and what context we want to process with. Context is
33  * in the SharedDesc, packet references in the JobDesc.
34  *
35  * So, a job desc looks like:
36  *
37  * ---------------------
38  * | Header            |
39  * | ShareDesc Pointer |
40  * | SEQ_OUT_PTR       |
41  * | (output buffer)   |
42  * | (output length)   |
43  * | SEQ_IN_PTR        |
44  * | (input buffer)    |
45  * | (input length)    |
46  * ---------------------
47  */
48 
49 #include "compat.h"
50 
51 #include "regs.h"
52 #include "intern.h"
53 #include "desc_constr.h"
54 #include "jr.h"
55 #include "error.h"
56 #include "sg_sw_sec4.h"
57 #include "key_gen.h"
58 #include "caamalg_desc.h"
59 
/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

/*
 * Job descriptor I/O overhead reserved for each AEAD flavor, expressed as
 * DESC_JOB_IO_LEN plus a number of extra CAAM command words.
 */
#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

/* space left for a shared descriptor, in bytes and in 32-bit command words */
#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
79 
#ifdef DEBUG
/* for print_hex_dumps with line references */
/*
 * Use ##arg so the macro also expands correctly when invoked with a format
 * string and no variadic arguments; plain "printk(format, arg)" would leave
 * a dangling comma and fail to compile in that case.
 */
#define debug(format, arg...) printk(format, ##arg)
#else
#define debug(format, arg...)
#endif
86 
/* per-algorithm template data used when registering an alg with the core */
struct caam_alg_entry {
	int class1_alg_type;	/* CAAM class 1 (cipher) algorithm type */
	int class2_alg_type;	/* CAAM class 2 (auth) algorithm type */
	bool rfc3686;		/* true for CTR variants carrying a nonce */
	bool geniv;		/* true when the IV is generated by hardware */
};
93 
/* AEAD algorithm wrapper: crypto API alg plus CAAM-specific parameters */
struct caam_aead_alg {
	struct aead_alg aead;		/* crypto API algorithm definition */
	struct caam_alg_entry caam;	/* CAAM algorithm parameters */
	bool registered;		/* set once registered with the core */
};
99 
/* skcipher algorithm wrapper: crypto API alg plus CAAM-specific parameters */
struct caam_skcipher_alg {
	struct skcipher_alg skcipher;	/* crypto API algorithm definition */
	struct caam_alg_entry caam;	/* CAAM algorithm parameters */
	bool registered;		/* set once registered with the core */
};
105 
/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];	/* encrypt shared descriptor */
	u32 sh_desc_dec[DESC_MAX_USED_LEN];	/* decrypt shared descriptor */
	u8 key[CAAM_MAX_KEY_SIZE];		/* (split) key material */
	dma_addr_t sh_desc_enc_dma;	/* bus address of sh_desc_enc */
	dma_addr_t sh_desc_dec_dma;	/* bus address of sh_desc_dec */
	dma_addr_t key_dma;		/* bus address of key */
	enum dma_data_direction dir;	/* DMA direction for the syncs above */
	struct device *jrdev;		/* job ring device used for requests */
	struct alginfo adata;		/* authentication algorithm info */
	struct alginfo cdata;		/* cipher algorithm info */
	unsigned int authsize;		/* ICV (authentication tag) length */
};
122 
/*
 * (Re)build the encrypt and decrypt shared descriptors for
 * authentication-only ("null encryption") AEAD transforms and sync them
 * to the device. The auth key is inlined into each descriptor when it
 * fits, otherwise referenced by its DMA address. Returns 0.
 */
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	/* descriptor bytes left after job descriptor I/O and the padded key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	/* push the updated descriptor to the device */
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	/* push the updated descriptor to the device */
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
172 
/*
 * (Re)build the shared descriptors (encrypt, decrypt and - for geniv
 * transforms - givencrypt) for authenc-style AEAD algorithms and sync
 * them to the device.
 *
 * For each descriptor, desc_inline_query() decides per key (auth and
 * cipher) whether it can be inlined or must be referenced by DMA
 * address so that job + shared descriptor fit the 64-word buffer.
 *
 * Returns 0 on success, -EINVAL when a descriptor cannot be made to
 * fit, and defers to aead_null_set_sh_desc() for authentication-only
 * transforms.
 */
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	/* nothing to build until setauthsize() has been called */
	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* nonce is stored right after the cipher key in ctx->key */
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/* [0] = auth (split) key length, [1] = cipher key length */
	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	/* geniv transforms use the givencrypt descriptor instead */
	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	/* bit 0 of inl_mask: auth key can be inlined */
	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	/* bit 1 of inl_mask: cipher key can be inlined */
	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor (overwrites the encrypt slot) */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}
320 
321 static int aead_setauthsize(struct crypto_aead *authenc,
322 				    unsigned int authsize)
323 {
324 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
325 
326 	ctx->authsize = authsize;
327 	aead_set_sh_desc(authenc);
328 
329 	return 0;
330 }
331 
/*
 * (Re)build the AES-GCM encrypt and decrypt shared descriptors and sync
 * them to the device. The key is inlined when it fits the descriptor
 * buffer, otherwise referenced by DMA address. Returns 0; does nothing
 * until both key and authsize are set.
 */
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	/* descriptor bytes left after job descriptor I/O and the key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
381 
382 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
383 {
384 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
385 
386 	ctx->authsize = authsize;
387 	gcm_set_sh_desc(authenc);
388 
389 	return 0;
390 }
391 
/*
 * (Re)build the RFC4106 (GCM for IPsec ESP) encrypt and decrypt shared
 * descriptors and sync them to the device. The key is inlined when it
 * fits, otherwise referenced by DMA address. Returns 0; does nothing
 * until both key and authsize are set.
 */
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	/* descriptor bytes left after job descriptor I/O and the key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
443 
444 static int rfc4106_setauthsize(struct crypto_aead *authenc,
445 			       unsigned int authsize)
446 {
447 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
448 
449 	ctx->authsize = authsize;
450 	rfc4106_set_sh_desc(authenc);
451 
452 	return 0;
453 }
454 
/*
 * (Re)build the RFC4543 (GMAC for IPsec ESP) encrypt and decrypt shared
 * descriptors and sync them to the device. The key is inlined when it
 * fits, otherwise referenced by DMA address. Returns 0; does nothing
 * until both key and authsize are set.
 */
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	/* descriptor bytes left after job descriptor I/O and the key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
506 
507 static int rfc4543_setauthsize(struct crypto_aead *authenc,
508 			       unsigned int authsize)
509 {
510 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
511 
512 	ctx->authsize = authsize;
513 	rfc4543_set_sh_desc(authenc);
514 
515 	return 0;
516 }
517 
518 static int chachapoly_set_sh_desc(struct crypto_aead *aead)
519 {
520 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
521 	struct device *jrdev = ctx->jrdev;
522 	unsigned int ivsize = crypto_aead_ivsize(aead);
523 	u32 *desc;
524 
525 	if (!ctx->cdata.keylen || !ctx->authsize)
526 		return 0;
527 
528 	desc = ctx->sh_desc_enc;
529 	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
530 			       ctx->authsize, true, false);
531 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
532 				   desc_bytes(desc), ctx->dir);
533 
534 	desc = ctx->sh_desc_dec;
535 	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
536 			       ctx->authsize, false, false);
537 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
538 				   desc_bytes(desc), ctx->dir);
539 
540 	return 0;
541 }
542 
543 static int chachapoly_setauthsize(struct crypto_aead *aead,
544 				  unsigned int authsize)
545 {
546 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
547 
548 	if (authsize != POLY1305_DIGEST_SIZE)
549 		return -EINVAL;
550 
551 	ctx->authsize = authsize;
552 	return chachapoly_set_sh_desc(aead);
553 }
554 
/*
 * Set the ChaCha20-Poly1305 key. The supplied material is the ChaCha20
 * key optionally followed by a salt of (CHACHAPOLY_IV_SIZE - ivsize)
 * bytes; only the ChaCha20 part is counted in cdata.keylen. Returns
 * -EINVAL (with BAD_KEY_LEN flagged) on a wrong total length.
 */
static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;

	if (keylen != CHACHA_KEY_SIZE + saltlen) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* keep a virtual reference; the key is inlined in the descriptor */
	ctx->cdata.key_virt = key;
	ctx->cdata.keylen = keylen - saltlen;

	return chachapoly_set_sh_desc(aead);
}
572 
/*
 * Set the authenc-style AEAD key. The input is an authenc blob holding
 * both the authentication and the encryption key.
 *
 * On CAAM era >= 6 the Derived Key Protocol (DKP) lets the shared
 * descriptor derive the split key itself, so the raw auth key is simply
 * copied into ctx->key; on older hardware gen_split_key() is used. In
 * both cases the encryption key is appended after the (padded) auth key
 * and the combined buffer is synced to the device.
 *
 * Returns 0 on success, -EINVAL (with BAD_KEY_LEN flagged) on failure.
 */
static int aead_setkey(struct crypto_aead *aead,
			       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	/* pre-era-6 hardware: derive the split key on a job ring */
	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	/* scrub key material from the stack before returning */
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
640 
/*
 * Set an authenc key whose encryption part is 3DES: verify the 3DES key
 * (length and K1 != K2 != K3 requirement) before delegating to
 * aead_setkey(). Key material on the stack is scrubbed on every path.
 */
static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_authenc_keys keys;
	u32 flags;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto badkey;

	err = -EINVAL;
	if (keys.enckeylen != DES3_EDE_KEY_SIZE)
		goto badkey;

	flags = crypto_aead_get_flags(aead);
	err = __des3_verify_key(&flags, keys.enckey);
	if (unlikely(err)) {
		/* propagate flags (e.g. weak-key indication) to the tfm */
		crypto_aead_set_flags(aead, flags);
		goto out;
	}

	err = aead_setkey(aead, key, keylen);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;

badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	goto out;
}
673 
/*
 * Set the AES-GCM key: copy it into the context key buffer, sync it to
 * the device and rebuild the shared descriptors.
 */
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}
691 
/*
 * Set the RFC4106 key: the supplied material is the AES key followed by
 * a 4-byte salt used in the nonce. Only the AES part counts towards
 * cdata.keylen. Returns -EINVAL when the material is too short to hold
 * the salt.
 */
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	/*
	 * NOTE(review): only cdata.keylen bytes are synced, so the 4 salt
	 * bytes copied into ctx->key are not pushed to the device here —
	 * confirm the descriptor consumes the salt by value rather than
	 * reading it from key_dma.
	 */
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}
717 
/*
 * Set the RFC4543 key: the supplied material is the AES key followed by
 * a 4-byte salt used in the nonce. Only the AES part counts towards
 * cdata.keylen. Returns -EINVAL when the material is too short to hold
 * the salt.
 */
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	/*
	 * NOTE(review): only cdata.keylen bytes are synced (salt excluded),
	 * mirroring rfc4106_setkey() — confirm the descriptor consumes the
	 * salt by value rather than reading it from key_dma.
	 */
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}
743 
/*
 * Set the skcipher key and (re)build the encrypt and decrypt shared
 * descriptors. The key is always inlined into the descriptors. For
 * AES-CTR and RFC3686 the IV is placed at an offset in CONTEXT1; for
 * RFC3686 the trailing nonce is stripped from the key length.
 */
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* the nonce trails the key; exclude it from the key length */
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
801 
802 static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
803 			       const u8 *key, unsigned int keylen)
804 {
805 	u32 tmp[DES3_EDE_EXPKEY_WORDS];
806 	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
807 
808 	if (keylen == DES3_EDE_KEY_SIZE &&
809 	    __des3_ede_setkey(tmp, &tfm->crt_flags, key, DES3_EDE_KEY_SIZE)) {
810 		return -EINVAL;
811 	}
812 
813 	if (!des_ekey(tmp, key) && (crypto_skcipher_get_flags(skcipher) &
814 	    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
815 		crypto_skcipher_set_flags(skcipher,
816 					  CRYPTO_TFM_RES_WEAK_KEY);
817 		return -EINVAL;
818 	}
819 
820 	return skcipher_setkey(skcipher, key, keylen);
821 }
822 
/*
 * Set the AES-XTS key (two concatenated AES keys) and (re)build the
 * encrypt and decrypt shared descriptors. Only 2x128-bit and 2x256-bit
 * keys are accepted; the key is always inlined into the descriptors.
 */
static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE  && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
854 
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];	/* flexible array: sized at allocation time */
};
876 
877 /*
878  * skcipher_edesc - s/w-extended skcipher descriptor
879  * @src_nents: number of segments in input s/w scatterlist
880  * @dst_nents: number of segments in output s/w scatterlist
881  * @mapped_src_nents: number of segments in input h/w link table
882  * @mapped_dst_nents: number of segments in output h/w link table
883  * @iv_dma: dma address of iv for checking continuity and link table
884  * @sec4_sg_bytes: length of dma mapped sec4_sg space
885  * @sec4_sg_dma: bus physical mapped address of h/w link table
886  * @sec4_sg: pointer to h/w link table
887  * @hw_desc: the h/w job descriptor followed by any referenced link tables
888  *	     and IV
889  */
890 struct skcipher_edesc {
891 	int src_nents;
892 	int dst_nents;
893 	int mapped_src_nents;
894 	int mapped_dst_nents;
895 	dma_addr_t iv_dma;
896 	int sec4_sg_bytes;
897 	dma_addr_t sec4_sg_dma;
898 	struct sec4_sg_entry *sec4_sg;
899 	u32 hw_desc[0];
900 };
901 
902 static void caam_unmap(struct device *dev, struct scatterlist *src,
903 		       struct scatterlist *dst, int src_nents,
904 		       int dst_nents,
905 		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
906 		       int sec4_sg_bytes)
907 {
908 	if (dst != src) {
909 		if (src_nents)
910 			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
911 		if (dst_nents)
912 			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
913 	} else {
914 		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
915 	}
916 
917 	if (iv_dma)
918 		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
919 	if (sec4_sg_bytes)
920 		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
921 				 DMA_TO_DEVICE);
922 }
923 
/* Unmap all DMA resources of an AEAD request (AEAD has no mapped IV). */
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
932 
/* Unmap all DMA resources of an skcipher request, including its IV. */
static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
944 
/*
 * Job ring completion callback for AEAD encryption: report any device
 * status, unmap and free the extended descriptor, and complete the
 * request.
 */
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				   void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* hw_desc is the first member after the bookkeeping fields */
	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}
966 
/*
 * Job ring completion callback for AEAD decryption: like the encrypt
 * callback, but a hardware ICV-check failure is translated to -EBADMSG
 * as the crypto API expects.
 */
static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				   void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
994 
/*
 * Job ring completion callback for skcipher encryption: report any
 * device status, unmap the request, propagate the last ciphertext block
 * into req->iv for chaining, then free and complete.
 */
static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	if (ivsize)
		scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
					 ivsize, ivsize, 0);

	kfree(edesc);

	skcipher_request_complete(req, err);
}
1035 
/*
 * Job ring completion callback for skcipher decryption.
 * Unmaps the request's DMA resources, frees the extended descriptor and
 * completes the request.  (req->iv was already updated with the last
 * ciphertext block before submission, in skcipher_decrypt().)
 */
static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* The job descriptor is embedded in the edesc; recover the edesc */
	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	skcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	skcipher_request_complete(req, err);
}
1065 
1066 /*
1067  * Fill in aead job descriptor
1068  */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	/* Pick the direction-specific shared descriptor from the context */
	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		/*
		 * Single mapped source segment: point at it directly.
		 * src_dma == 0 marks a null (zero-length) input.
		 */
		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
						    0;
		in_options = 0;
	} else {
		/*
		 * Multiple segments: use the sec4 S/G table; source entries
		 * occupy the start of the table.
		 */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
		in_options = LDST_SGF;
	}

	/* SEQ IN covers associated data followed by the payload */
	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	/* In-place operation by default: output overlays the input */
	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->mapped_dst_nents) {
			dst_dma = 0;
		} else if (edesc->mapped_dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			/* Dst entries follow the src entries in the table */
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	/* Encrypt appends the authsize-byte ICV; decrypt consumes it */
	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}
1128 
/*
 * Fill in the GCM-specific part of the job descriptor: assoclen in REG3,
 * then the IV (and, for RFC4106/4543, the 4-byte salt taken from the end
 * of the key material) as immediate FIFO LOAD data.
 */
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	/* ivsize == GCM_AES_IV_SIZE means plain gcm(aes), not RFC4106/4543 */
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
			 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
1158 
/*
 * Fill in the ChaCha20-Poly1305-specific part of the job descriptor:
 * effective assoclen in REG3 and the IV/nonce loaded into CONTEXT1.
 */
static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	/* Default CONTEXT1 load offset (4 bytes) for the RFC7539 nonce */
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12 bytes nonce in a single operation
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}
1192 
/*
 * Fill in the authenc (cipher + HMAC)-specific part of the job
 * descriptor: assoclen in REG3/DPOVRD and, when needed, the IV loaded
 * into CONTEXT1 at the mode-dependent offset.
 */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	/* geniv descriptors generate their own IV, so skip the load then */
	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
1241 
1242 /*
1243  * Fill in skcipher job descriptor
1244  */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 in_options = 0, out_options = 0;
	dma_addr_t src_dma, dst_dma, ptr;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	pr_err("asked=%d, cryptlen%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
#endif
	caam_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	/* Pick the direction-specific shared descriptor from the context */
	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	/*
	 * An S/G table is needed whenever the IV is prepended to the input
	 * (extra IV entry) or the source has multiple mapped segments;
	 * otherwise point directly at the single source segment.
	 */
	if (ivsize || edesc->mapped_src_nents > 1) {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
		in_options = LDST_SGF;
	} else {
		src_dma = sg_dma_address(req->src);
	}

	/* Input sequence = optional IV entry + cryptlen payload */
	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		/* In-place: reuse the input, skipping the leading IV entry */
		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
		out_options = in_options;
	} else if (edesc->mapped_dst_nents == 1) {
		dst_dma = sg_dma_address(req->dst);
	} else {
		/* Dst entries follow src (+IV) entries in the S/G table */
		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
			  sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}

	append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
}
1297 
1298 /*
1299  * allocate and map the aead extended descriptor
1300  */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	/* GFP_ATOMIC when the caller cannot sleep (e.g. softirq context) */
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	/*
	 * Count S/G entries.  Out-of-place: src covers assoclen + cryptlen,
	 * dst additionally gains (encrypt) or loses (decrypt) the ICV.
	 * In-place: one count covering the larger of the two.
	 */
	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
						(encrypt ? authsize :
							   (-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	/* DMA-map: bidirectional for in-place, separate directions otherwise */
	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover also the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	/* HW S/G table entries needed only for multi-segment src/dst */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	/* Link table lives right after the job descriptor in the edesc */
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	/* Populate the table: src entries first, then dst entries */
	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	/* No table needed: nothing left to map */
	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
1430 
/*
 * gcm(aes) encrypt entry point: allocate the extended descriptor, build
 * the GCM job descriptor and enqueue it on the job ring.  Returns
 * -EINPROGRESS on successful submission; completion is reported
 * asynchronously via aead_encrypt_done().
 */
static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: tear down mappings and free the edesc */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1465 
/*
 * rfc7539(chacha20,poly1305) encrypt entry point: allocate the extended
 * descriptor, build the job descriptor and enqueue it.  Returns
 * -EINPROGRESS on successful submission; aead_encrypt_done() completes
 * the request asynchronously.
 */
static int chachapoly_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, true);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: tear down mappings and free the edesc */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1498 
/*
 * rfc7539(chacha20,poly1305) decrypt entry point: mirror image of
 * chachapoly_encrypt(), completing via aead_decrypt_done() which maps an
 * ICV-check failure to -EBADMSG.
 */
static int chachapoly_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, false);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: tear down mappings and free the edesc */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1531 
1532 static int ipsec_gcm_encrypt(struct aead_request *req)
1533 {
1534 	if (req->assoclen < 8)
1535 		return -EINVAL;
1536 
1537 	return gcm_encrypt(req);
1538 }
1539 
/*
 * Generic authenc (cipher + HMAC) encrypt entry point: allocate the
 * extended descriptor, build the job descriptor and enqueue it.  Returns
 * -EINPROGRESS on successful submission; aead_encrypt_done() completes
 * the request asynchronously.
 */
static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: tear down mappings and free the edesc */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1575 
/*
 * gcm(aes) decrypt entry point: mirror image of gcm_encrypt(),
 * completing via aead_decrypt_done() which maps an ICV-check failure to
 * -EBADMSG.
 */
static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: tear down mappings and free the edesc */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1610 
1611 static int ipsec_gcm_decrypt(struct aead_request *req)
1612 {
1613 	if (req->assoclen < 8)
1614 		return -EINVAL;
1615 
1616 	return gcm_decrypt(req);
1617 }
1618 
/*
 * Generic authenc (cipher + HMAC) decrypt entry point: mirror image of
 * aead_encrypt(), completing via aead_decrypt_done() which maps an
 * ICV-check failure to -EBADMSG.
 */
static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     req->assoclen + req->cryptlen, 1);

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: tear down mappings and free the edesc */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1658 
1659 /*
1660  * allocate and map the skcipher extended descriptor for skcipher
1661  */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	/* GFP_ATOMIC when the caller cannot sleep (e.g. softirq context) */
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	/* DMA-map: bidirectional for in-place, separate directions otherwise */
	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Input S/G table holds the IV entry (if any) followed by the source
	 * entries; only a single IV-less segment can go without a table.
	 */
	if (!ivsize && mapped_src_nents == 1)
		sec4_sg_ents = 0; // no need for an input hw s/g table
	else
		sec4_sg_ents = mapped_src_nents + !!ivsize;
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	/* Link table lives right after the job descriptor in the edesc */
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);

	/* Make sure IV is located in a DMAable area */
	if (ivsize) {
		/* IV copy lives at the tail of the edesc allocation */
		iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, iv_dma)) {
			dev_err(jrdev, "unable to map IV\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		/* IV is the first entry of the input S/G table */
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	}
	/* dst_sg_idx != 0 means an input S/G table is in use */
	if (dst_sg_idx)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg +
				   !!ivsize, 0);

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	if (sec4_sg_bytes) {
		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
						    sec4_sg_bytes,
						    DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
			dev_err(jrdev, "unable to map S/G table\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, iv_dma, ivsize, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	return edesc;
}
1792 
/*
 * skcipher encrypt entry point: allocate the extended descriptor, build
 * the job descriptor and enqueue it.  Returns -EINPROGRESS on successful
 * submission; skcipher_encrypt_done() completes the request (and updates
 * req->iv) asynchronously.
 */
static int skcipher_encrypt(struct skcipher_request *req)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_skcipher_job(req, edesc, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: tear down mappings and free the edesc */
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1826 
/*
 * skcipher decrypt entry point.  Saves the last ciphertext block into
 * req->iv *before* submission (it would be overwritten by an in-place
 * decrypt), then builds and enqueues the job descriptor.  Returns
 * -EINPROGRESS on successful submission; skcipher_decrypt_done()
 * completes the request asynchronously.
 */
static int skcipher_decrypt(struct skcipher_request *req)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block.
	 */
	if (ivsize)
		scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
					 ivsize, ivsize, 0);

	/* Create and submit job descriptor*/
	init_skcipher_job(req, edesc, false);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: tear down mappings and free the edesc */
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}
1869 
/*
 * Table of skcipher algorithms offloaded to CAAM.  Generic crypto API
 * properties (key/IV/block sizes, callbacks) live in .skcipher;
 * .caam.class1_alg_type selects the SEC algorithm and mode of operation
 * (OP_ALG_ALGSEL_* | OP_ALG_AAI_*), and .caam.rfc3686 requests the
 * RFC3686 nonce/counter handling.
 */
static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			/* Key material carries the RFC3686 nonce at its end */
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			/* XTS uses two AES keys, hence the doubled sizes */
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des)",
				.cra_driver_name = "ecb-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(aes)",
				.cra_driver_name = "ecb-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des3_ede)",
				.cra_driver_name = "ecb-des3-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(arc4)",
				.cra_driver_name = "ecb-arc4-caam",
				.cra_blocksize = ARC4_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = ARC4_MIN_KEY_SIZE,
			.max_keysize = ARC4_MAX_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB,
	},
};
2037 
2038 static struct caam_aead_alg driver_aeads[] = {
2039 	{
2040 		.aead = {
2041 			.base = {
2042 				.cra_name = "rfc4106(gcm(aes))",
2043 				.cra_driver_name = "rfc4106-gcm-aes-caam",
2044 				.cra_blocksize = 1,
2045 			},
2046 			.setkey = rfc4106_setkey,
2047 			.setauthsize = rfc4106_setauthsize,
2048 			.encrypt = ipsec_gcm_encrypt,
2049 			.decrypt = ipsec_gcm_decrypt,
2050 			.ivsize = GCM_RFC4106_IV_SIZE,
2051 			.maxauthsize = AES_BLOCK_SIZE,
2052 		},
2053 		.caam = {
2054 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2055 		},
2056 	},
2057 	{
2058 		.aead = {
2059 			.base = {
2060 				.cra_name = "rfc4543(gcm(aes))",
2061 				.cra_driver_name = "rfc4543-gcm-aes-caam",
2062 				.cra_blocksize = 1,
2063 			},
2064 			.setkey = rfc4543_setkey,
2065 			.setauthsize = rfc4543_setauthsize,
2066 			.encrypt = ipsec_gcm_encrypt,
2067 			.decrypt = ipsec_gcm_decrypt,
2068 			.ivsize = GCM_RFC4543_IV_SIZE,
2069 			.maxauthsize = AES_BLOCK_SIZE,
2070 		},
2071 		.caam = {
2072 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2073 		},
2074 	},
2075 	/* Galois Counter Mode */
2076 	{
2077 		.aead = {
2078 			.base = {
2079 				.cra_name = "gcm(aes)",
2080 				.cra_driver_name = "gcm-aes-caam",
2081 				.cra_blocksize = 1,
2082 			},
2083 			.setkey = gcm_setkey,
2084 			.setauthsize = gcm_setauthsize,
2085 			.encrypt = gcm_encrypt,
2086 			.decrypt = gcm_decrypt,
2087 			.ivsize = GCM_AES_IV_SIZE,
2088 			.maxauthsize = AES_BLOCK_SIZE,
2089 		},
2090 		.caam = {
2091 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2092 		},
2093 	},
2094 	/* single-pass ipsec_esp descriptor */
2095 	{
2096 		.aead = {
2097 			.base = {
2098 				.cra_name = "authenc(hmac(md5),"
2099 					    "ecb(cipher_null))",
2100 				.cra_driver_name = "authenc-hmac-md5-"
2101 						   "ecb-cipher_null-caam",
2102 				.cra_blocksize = NULL_BLOCK_SIZE,
2103 			},
2104 			.setkey = aead_setkey,
2105 			.setauthsize = aead_setauthsize,
2106 			.encrypt = aead_encrypt,
2107 			.decrypt = aead_decrypt,
2108 			.ivsize = NULL_IV_SIZE,
2109 			.maxauthsize = MD5_DIGEST_SIZE,
2110 		},
2111 		.caam = {
2112 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2113 					   OP_ALG_AAI_HMAC_PRECOMP,
2114 		},
2115 	},
2116 	{
2117 		.aead = {
2118 			.base = {
2119 				.cra_name = "authenc(hmac(sha1),"
2120 					    "ecb(cipher_null))",
2121 				.cra_driver_name = "authenc-hmac-sha1-"
2122 						   "ecb-cipher_null-caam",
2123 				.cra_blocksize = NULL_BLOCK_SIZE,
2124 			},
2125 			.setkey = aead_setkey,
2126 			.setauthsize = aead_setauthsize,
2127 			.encrypt = aead_encrypt,
2128 			.decrypt = aead_decrypt,
2129 			.ivsize = NULL_IV_SIZE,
2130 			.maxauthsize = SHA1_DIGEST_SIZE,
2131 		},
2132 		.caam = {
2133 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2134 					   OP_ALG_AAI_HMAC_PRECOMP,
2135 		},
2136 	},
2137 	{
2138 		.aead = {
2139 			.base = {
2140 				.cra_name = "authenc(hmac(sha224),"
2141 					    "ecb(cipher_null))",
2142 				.cra_driver_name = "authenc-hmac-sha224-"
2143 						   "ecb-cipher_null-caam",
2144 				.cra_blocksize = NULL_BLOCK_SIZE,
2145 			},
2146 			.setkey = aead_setkey,
2147 			.setauthsize = aead_setauthsize,
2148 			.encrypt = aead_encrypt,
2149 			.decrypt = aead_decrypt,
2150 			.ivsize = NULL_IV_SIZE,
2151 			.maxauthsize = SHA224_DIGEST_SIZE,
2152 		},
2153 		.caam = {
2154 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2155 					   OP_ALG_AAI_HMAC_PRECOMP,
2156 		},
2157 	},
2158 	{
2159 		.aead = {
2160 			.base = {
2161 				.cra_name = "authenc(hmac(sha256),"
2162 					    "ecb(cipher_null))",
2163 				.cra_driver_name = "authenc-hmac-sha256-"
2164 						   "ecb-cipher_null-caam",
2165 				.cra_blocksize = NULL_BLOCK_SIZE,
2166 			},
2167 			.setkey = aead_setkey,
2168 			.setauthsize = aead_setauthsize,
2169 			.encrypt = aead_encrypt,
2170 			.decrypt = aead_decrypt,
2171 			.ivsize = NULL_IV_SIZE,
2172 			.maxauthsize = SHA256_DIGEST_SIZE,
2173 		},
2174 		.caam = {
2175 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2176 					   OP_ALG_AAI_HMAC_PRECOMP,
2177 		},
2178 	},
2179 	{
2180 		.aead = {
2181 			.base = {
2182 				.cra_name = "authenc(hmac(sha384),"
2183 					    "ecb(cipher_null))",
2184 				.cra_driver_name = "authenc-hmac-sha384-"
2185 						   "ecb-cipher_null-caam",
2186 				.cra_blocksize = NULL_BLOCK_SIZE,
2187 			},
2188 			.setkey = aead_setkey,
2189 			.setauthsize = aead_setauthsize,
2190 			.encrypt = aead_encrypt,
2191 			.decrypt = aead_decrypt,
2192 			.ivsize = NULL_IV_SIZE,
2193 			.maxauthsize = SHA384_DIGEST_SIZE,
2194 		},
2195 		.caam = {
2196 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2197 					   OP_ALG_AAI_HMAC_PRECOMP,
2198 		},
2199 	},
2200 	{
2201 		.aead = {
2202 			.base = {
2203 				.cra_name = "authenc(hmac(sha512),"
2204 					    "ecb(cipher_null))",
2205 				.cra_driver_name = "authenc-hmac-sha512-"
2206 						   "ecb-cipher_null-caam",
2207 				.cra_blocksize = NULL_BLOCK_SIZE,
2208 			},
2209 			.setkey = aead_setkey,
2210 			.setauthsize = aead_setauthsize,
2211 			.encrypt = aead_encrypt,
2212 			.decrypt = aead_decrypt,
2213 			.ivsize = NULL_IV_SIZE,
2214 			.maxauthsize = SHA512_DIGEST_SIZE,
2215 		},
2216 		.caam = {
2217 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2218 					   OP_ALG_AAI_HMAC_PRECOMP,
2219 		},
2220 	},
2221 	{
2222 		.aead = {
2223 			.base = {
2224 				.cra_name = "authenc(hmac(md5),cbc(aes))",
2225 				.cra_driver_name = "authenc-hmac-md5-"
2226 						   "cbc-aes-caam",
2227 				.cra_blocksize = AES_BLOCK_SIZE,
2228 			},
2229 			.setkey = aead_setkey,
2230 			.setauthsize = aead_setauthsize,
2231 			.encrypt = aead_encrypt,
2232 			.decrypt = aead_decrypt,
2233 			.ivsize = AES_BLOCK_SIZE,
2234 			.maxauthsize = MD5_DIGEST_SIZE,
2235 		},
2236 		.caam = {
2237 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2238 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2239 					   OP_ALG_AAI_HMAC_PRECOMP,
2240 		},
2241 	},
2242 	{
2243 		.aead = {
2244 			.base = {
2245 				.cra_name = "echainiv(authenc(hmac(md5),"
2246 					    "cbc(aes)))",
2247 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2248 						   "cbc-aes-caam",
2249 				.cra_blocksize = AES_BLOCK_SIZE,
2250 			},
2251 			.setkey = aead_setkey,
2252 			.setauthsize = aead_setauthsize,
2253 			.encrypt = aead_encrypt,
2254 			.decrypt = aead_decrypt,
2255 			.ivsize = AES_BLOCK_SIZE,
2256 			.maxauthsize = MD5_DIGEST_SIZE,
2257 		},
2258 		.caam = {
2259 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2260 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2261 					   OP_ALG_AAI_HMAC_PRECOMP,
2262 			.geniv = true,
2263 		},
2264 	},
2265 	{
2266 		.aead = {
2267 			.base = {
2268 				.cra_name = "authenc(hmac(sha1),cbc(aes))",
2269 				.cra_driver_name = "authenc-hmac-sha1-"
2270 						   "cbc-aes-caam",
2271 				.cra_blocksize = AES_BLOCK_SIZE,
2272 			},
2273 			.setkey = aead_setkey,
2274 			.setauthsize = aead_setauthsize,
2275 			.encrypt = aead_encrypt,
2276 			.decrypt = aead_decrypt,
2277 			.ivsize = AES_BLOCK_SIZE,
2278 			.maxauthsize = SHA1_DIGEST_SIZE,
2279 		},
2280 		.caam = {
2281 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2282 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2283 					   OP_ALG_AAI_HMAC_PRECOMP,
2284 		},
2285 	},
2286 	{
2287 		.aead = {
2288 			.base = {
2289 				.cra_name = "echainiv(authenc(hmac(sha1),"
2290 					    "cbc(aes)))",
2291 				.cra_driver_name = "echainiv-authenc-"
2292 						   "hmac-sha1-cbc-aes-caam",
2293 				.cra_blocksize = AES_BLOCK_SIZE,
2294 			},
2295 			.setkey = aead_setkey,
2296 			.setauthsize = aead_setauthsize,
2297 			.encrypt = aead_encrypt,
2298 			.decrypt = aead_decrypt,
2299 			.ivsize = AES_BLOCK_SIZE,
2300 			.maxauthsize = SHA1_DIGEST_SIZE,
2301 		},
2302 		.caam = {
2303 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2304 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2305 					   OP_ALG_AAI_HMAC_PRECOMP,
2306 			.geniv = true,
2307 		},
2308 	},
2309 	{
2310 		.aead = {
2311 			.base = {
2312 				.cra_name = "authenc(hmac(sha224),cbc(aes))",
2313 				.cra_driver_name = "authenc-hmac-sha224-"
2314 						   "cbc-aes-caam",
2315 				.cra_blocksize = AES_BLOCK_SIZE,
2316 			},
2317 			.setkey = aead_setkey,
2318 			.setauthsize = aead_setauthsize,
2319 			.encrypt = aead_encrypt,
2320 			.decrypt = aead_decrypt,
2321 			.ivsize = AES_BLOCK_SIZE,
2322 			.maxauthsize = SHA224_DIGEST_SIZE,
2323 		},
2324 		.caam = {
2325 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2326 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2327 					   OP_ALG_AAI_HMAC_PRECOMP,
2328 		},
2329 	},
2330 	{
2331 		.aead = {
2332 			.base = {
2333 				.cra_name = "echainiv(authenc(hmac(sha224),"
2334 					    "cbc(aes)))",
2335 				.cra_driver_name = "echainiv-authenc-"
2336 						   "hmac-sha224-cbc-aes-caam",
2337 				.cra_blocksize = AES_BLOCK_SIZE,
2338 			},
2339 			.setkey = aead_setkey,
2340 			.setauthsize = aead_setauthsize,
2341 			.encrypt = aead_encrypt,
2342 			.decrypt = aead_decrypt,
2343 			.ivsize = AES_BLOCK_SIZE,
2344 			.maxauthsize = SHA224_DIGEST_SIZE,
2345 		},
2346 		.caam = {
2347 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2348 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2349 					   OP_ALG_AAI_HMAC_PRECOMP,
2350 			.geniv = true,
2351 		},
2352 	},
2353 	{
2354 		.aead = {
2355 			.base = {
2356 				.cra_name = "authenc(hmac(sha256),cbc(aes))",
2357 				.cra_driver_name = "authenc-hmac-sha256-"
2358 						   "cbc-aes-caam",
2359 				.cra_blocksize = AES_BLOCK_SIZE,
2360 			},
2361 			.setkey = aead_setkey,
2362 			.setauthsize = aead_setauthsize,
2363 			.encrypt = aead_encrypt,
2364 			.decrypt = aead_decrypt,
2365 			.ivsize = AES_BLOCK_SIZE,
2366 			.maxauthsize = SHA256_DIGEST_SIZE,
2367 		},
2368 		.caam = {
2369 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2370 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2371 					   OP_ALG_AAI_HMAC_PRECOMP,
2372 		},
2373 	},
2374 	{
2375 		.aead = {
2376 			.base = {
2377 				.cra_name = "echainiv(authenc(hmac(sha256),"
2378 					    "cbc(aes)))",
2379 				.cra_driver_name = "echainiv-authenc-"
2380 						   "hmac-sha256-cbc-aes-caam",
2381 				.cra_blocksize = AES_BLOCK_SIZE,
2382 			},
2383 			.setkey = aead_setkey,
2384 			.setauthsize = aead_setauthsize,
2385 			.encrypt = aead_encrypt,
2386 			.decrypt = aead_decrypt,
2387 			.ivsize = AES_BLOCK_SIZE,
2388 			.maxauthsize = SHA256_DIGEST_SIZE,
2389 		},
2390 		.caam = {
2391 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2392 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2393 					   OP_ALG_AAI_HMAC_PRECOMP,
2394 			.geniv = true,
2395 		},
2396 	},
2397 	{
2398 		.aead = {
2399 			.base = {
2400 				.cra_name = "authenc(hmac(sha384),cbc(aes))",
2401 				.cra_driver_name = "authenc-hmac-sha384-"
2402 						   "cbc-aes-caam",
2403 				.cra_blocksize = AES_BLOCK_SIZE,
2404 			},
2405 			.setkey = aead_setkey,
2406 			.setauthsize = aead_setauthsize,
2407 			.encrypt = aead_encrypt,
2408 			.decrypt = aead_decrypt,
2409 			.ivsize = AES_BLOCK_SIZE,
2410 			.maxauthsize = SHA384_DIGEST_SIZE,
2411 		},
2412 		.caam = {
2413 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2414 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2415 					   OP_ALG_AAI_HMAC_PRECOMP,
2416 		},
2417 	},
2418 	{
2419 		.aead = {
2420 			.base = {
2421 				.cra_name = "echainiv(authenc(hmac(sha384),"
2422 					    "cbc(aes)))",
2423 				.cra_driver_name = "echainiv-authenc-"
2424 						   "hmac-sha384-cbc-aes-caam",
2425 				.cra_blocksize = AES_BLOCK_SIZE,
2426 			},
2427 			.setkey = aead_setkey,
2428 			.setauthsize = aead_setauthsize,
2429 			.encrypt = aead_encrypt,
2430 			.decrypt = aead_decrypt,
2431 			.ivsize = AES_BLOCK_SIZE,
2432 			.maxauthsize = SHA384_DIGEST_SIZE,
2433 		},
2434 		.caam = {
2435 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2436 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2437 					   OP_ALG_AAI_HMAC_PRECOMP,
2438 			.geniv = true,
2439 		},
2440 	},
2441 	{
2442 		.aead = {
2443 			.base = {
2444 				.cra_name = "authenc(hmac(sha512),cbc(aes))",
2445 				.cra_driver_name = "authenc-hmac-sha512-"
2446 						   "cbc-aes-caam",
2447 				.cra_blocksize = AES_BLOCK_SIZE,
2448 			},
2449 			.setkey = aead_setkey,
2450 			.setauthsize = aead_setauthsize,
2451 			.encrypt = aead_encrypt,
2452 			.decrypt = aead_decrypt,
2453 			.ivsize = AES_BLOCK_SIZE,
2454 			.maxauthsize = SHA512_DIGEST_SIZE,
2455 		},
2456 		.caam = {
2457 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2458 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2459 					   OP_ALG_AAI_HMAC_PRECOMP,
2460 		},
2461 	},
2462 	{
2463 		.aead = {
2464 			.base = {
2465 				.cra_name = "echainiv(authenc(hmac(sha512),"
2466 					    "cbc(aes)))",
2467 				.cra_driver_name = "echainiv-authenc-"
2468 						   "hmac-sha512-cbc-aes-caam",
2469 				.cra_blocksize = AES_BLOCK_SIZE,
2470 			},
2471 			.setkey = aead_setkey,
2472 			.setauthsize = aead_setauthsize,
2473 			.encrypt = aead_encrypt,
2474 			.decrypt = aead_decrypt,
2475 			.ivsize = AES_BLOCK_SIZE,
2476 			.maxauthsize = SHA512_DIGEST_SIZE,
2477 		},
2478 		.caam = {
2479 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2480 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2481 					   OP_ALG_AAI_HMAC_PRECOMP,
2482 			.geniv = true,
2483 		},
2484 	},
2485 	{
2486 		.aead = {
2487 			.base = {
2488 				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2489 				.cra_driver_name = "authenc-hmac-md5-"
2490 						   "cbc-des3_ede-caam",
2491 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2492 			},
2493 			.setkey = des3_aead_setkey,
2494 			.setauthsize = aead_setauthsize,
2495 			.encrypt = aead_encrypt,
2496 			.decrypt = aead_decrypt,
2497 			.ivsize = DES3_EDE_BLOCK_SIZE,
2498 			.maxauthsize = MD5_DIGEST_SIZE,
2499 		},
2500 		.caam = {
2501 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2502 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2503 					   OP_ALG_AAI_HMAC_PRECOMP,
2504 		}
2505 	},
2506 	{
2507 		.aead = {
2508 			.base = {
2509 				.cra_name = "echainiv(authenc(hmac(md5),"
2510 					    "cbc(des3_ede)))",
2511 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2512 						   "cbc-des3_ede-caam",
2513 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2514 			},
2515 			.setkey = des3_aead_setkey,
2516 			.setauthsize = aead_setauthsize,
2517 			.encrypt = aead_encrypt,
2518 			.decrypt = aead_decrypt,
2519 			.ivsize = DES3_EDE_BLOCK_SIZE,
2520 			.maxauthsize = MD5_DIGEST_SIZE,
2521 		},
2522 		.caam = {
2523 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2524 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2525 					   OP_ALG_AAI_HMAC_PRECOMP,
2526 			.geniv = true,
2527 		}
2528 	},
2529 	{
2530 		.aead = {
2531 			.base = {
2532 				.cra_name = "authenc(hmac(sha1),"
2533 					    "cbc(des3_ede))",
2534 				.cra_driver_name = "authenc-hmac-sha1-"
2535 						   "cbc-des3_ede-caam",
2536 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2537 			},
2538 			.setkey = des3_aead_setkey,
2539 			.setauthsize = aead_setauthsize,
2540 			.encrypt = aead_encrypt,
2541 			.decrypt = aead_decrypt,
2542 			.ivsize = DES3_EDE_BLOCK_SIZE,
2543 			.maxauthsize = SHA1_DIGEST_SIZE,
2544 		},
2545 		.caam = {
2546 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2547 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2548 					   OP_ALG_AAI_HMAC_PRECOMP,
2549 		},
2550 	},
2551 	{
2552 		.aead = {
2553 			.base = {
2554 				.cra_name = "echainiv(authenc(hmac(sha1),"
2555 					    "cbc(des3_ede)))",
2556 				.cra_driver_name = "echainiv-authenc-"
2557 						   "hmac-sha1-"
2558 						   "cbc-des3_ede-caam",
2559 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2560 			},
2561 			.setkey = des3_aead_setkey,
2562 			.setauthsize = aead_setauthsize,
2563 			.encrypt = aead_encrypt,
2564 			.decrypt = aead_decrypt,
2565 			.ivsize = DES3_EDE_BLOCK_SIZE,
2566 			.maxauthsize = SHA1_DIGEST_SIZE,
2567 		},
2568 		.caam = {
2569 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2570 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2571 					   OP_ALG_AAI_HMAC_PRECOMP,
2572 			.geniv = true,
2573 		},
2574 	},
2575 	{
2576 		.aead = {
2577 			.base = {
2578 				.cra_name = "authenc(hmac(sha224),"
2579 					    "cbc(des3_ede))",
2580 				.cra_driver_name = "authenc-hmac-sha224-"
2581 						   "cbc-des3_ede-caam",
2582 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2583 			},
2584 			.setkey = des3_aead_setkey,
2585 			.setauthsize = aead_setauthsize,
2586 			.encrypt = aead_encrypt,
2587 			.decrypt = aead_decrypt,
2588 			.ivsize = DES3_EDE_BLOCK_SIZE,
2589 			.maxauthsize = SHA224_DIGEST_SIZE,
2590 		},
2591 		.caam = {
2592 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2593 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2594 					   OP_ALG_AAI_HMAC_PRECOMP,
2595 		},
2596 	},
2597 	{
2598 		.aead = {
2599 			.base = {
2600 				.cra_name = "echainiv(authenc(hmac(sha224),"
2601 					    "cbc(des3_ede)))",
2602 				.cra_driver_name = "echainiv-authenc-"
2603 						   "hmac-sha224-"
2604 						   "cbc-des3_ede-caam",
2605 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2606 			},
2607 			.setkey = des3_aead_setkey,
2608 			.setauthsize = aead_setauthsize,
2609 			.encrypt = aead_encrypt,
2610 			.decrypt = aead_decrypt,
2611 			.ivsize = DES3_EDE_BLOCK_SIZE,
2612 			.maxauthsize = SHA224_DIGEST_SIZE,
2613 		},
2614 		.caam = {
2615 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2616 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2617 					   OP_ALG_AAI_HMAC_PRECOMP,
2618 			.geniv = true,
2619 		},
2620 	},
2621 	{
2622 		.aead = {
2623 			.base = {
2624 				.cra_name = "authenc(hmac(sha256),"
2625 					    "cbc(des3_ede))",
2626 				.cra_driver_name = "authenc-hmac-sha256-"
2627 						   "cbc-des3_ede-caam",
2628 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2629 			},
2630 			.setkey = des3_aead_setkey,
2631 			.setauthsize = aead_setauthsize,
2632 			.encrypt = aead_encrypt,
2633 			.decrypt = aead_decrypt,
2634 			.ivsize = DES3_EDE_BLOCK_SIZE,
2635 			.maxauthsize = SHA256_DIGEST_SIZE,
2636 		},
2637 		.caam = {
2638 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2639 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2640 					   OP_ALG_AAI_HMAC_PRECOMP,
2641 		},
2642 	},
2643 	{
2644 		.aead = {
2645 			.base = {
2646 				.cra_name = "echainiv(authenc(hmac(sha256),"
2647 					    "cbc(des3_ede)))",
2648 				.cra_driver_name = "echainiv-authenc-"
2649 						   "hmac-sha256-"
2650 						   "cbc-des3_ede-caam",
2651 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2652 			},
2653 			.setkey = des3_aead_setkey,
2654 			.setauthsize = aead_setauthsize,
2655 			.encrypt = aead_encrypt,
2656 			.decrypt = aead_decrypt,
2657 			.ivsize = DES3_EDE_BLOCK_SIZE,
2658 			.maxauthsize = SHA256_DIGEST_SIZE,
2659 		},
2660 		.caam = {
2661 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2662 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2663 					   OP_ALG_AAI_HMAC_PRECOMP,
2664 			.geniv = true,
2665 		},
2666 	},
2667 	{
2668 		.aead = {
2669 			.base = {
2670 				.cra_name = "authenc(hmac(sha384),"
2671 					    "cbc(des3_ede))",
2672 				.cra_driver_name = "authenc-hmac-sha384-"
2673 						   "cbc-des3_ede-caam",
2674 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2675 			},
2676 			.setkey = des3_aead_setkey,
2677 			.setauthsize = aead_setauthsize,
2678 			.encrypt = aead_encrypt,
2679 			.decrypt = aead_decrypt,
2680 			.ivsize = DES3_EDE_BLOCK_SIZE,
2681 			.maxauthsize = SHA384_DIGEST_SIZE,
2682 		},
2683 		.caam = {
2684 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2685 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2686 					   OP_ALG_AAI_HMAC_PRECOMP,
2687 		},
2688 	},
2689 	{
2690 		.aead = {
2691 			.base = {
2692 				.cra_name = "echainiv(authenc(hmac(sha384),"
2693 					    "cbc(des3_ede)))",
2694 				.cra_driver_name = "echainiv-authenc-"
2695 						   "hmac-sha384-"
2696 						   "cbc-des3_ede-caam",
2697 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2698 			},
2699 			.setkey = des3_aead_setkey,
2700 			.setauthsize = aead_setauthsize,
2701 			.encrypt = aead_encrypt,
2702 			.decrypt = aead_decrypt,
2703 			.ivsize = DES3_EDE_BLOCK_SIZE,
2704 			.maxauthsize = SHA384_DIGEST_SIZE,
2705 		},
2706 		.caam = {
2707 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2708 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2709 					   OP_ALG_AAI_HMAC_PRECOMP,
2710 			.geniv = true,
2711 		},
2712 	},
2713 	{
2714 		.aead = {
2715 			.base = {
2716 				.cra_name = "authenc(hmac(sha512),"
2717 					    "cbc(des3_ede))",
2718 				.cra_driver_name = "authenc-hmac-sha512-"
2719 						   "cbc-des3_ede-caam",
2720 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2721 			},
2722 			.setkey = des3_aead_setkey,
2723 			.setauthsize = aead_setauthsize,
2724 			.encrypt = aead_encrypt,
2725 			.decrypt = aead_decrypt,
2726 			.ivsize = DES3_EDE_BLOCK_SIZE,
2727 			.maxauthsize = SHA512_DIGEST_SIZE,
2728 		},
2729 		.caam = {
2730 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2731 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2732 					   OP_ALG_AAI_HMAC_PRECOMP,
2733 		},
2734 	},
2735 	{
2736 		.aead = {
2737 			.base = {
2738 				.cra_name = "echainiv(authenc(hmac(sha512),"
2739 					    "cbc(des3_ede)))",
2740 				.cra_driver_name = "echainiv-authenc-"
2741 						   "hmac-sha512-"
2742 						   "cbc-des3_ede-caam",
2743 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2744 			},
2745 			.setkey = des3_aead_setkey,
2746 			.setauthsize = aead_setauthsize,
2747 			.encrypt = aead_encrypt,
2748 			.decrypt = aead_decrypt,
2749 			.ivsize = DES3_EDE_BLOCK_SIZE,
2750 			.maxauthsize = SHA512_DIGEST_SIZE,
2751 		},
2752 		.caam = {
2753 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2754 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2755 					   OP_ALG_AAI_HMAC_PRECOMP,
2756 			.geniv = true,
2757 		},
2758 	},
2759 	{
2760 		.aead = {
2761 			.base = {
2762 				.cra_name = "authenc(hmac(md5),cbc(des))",
2763 				.cra_driver_name = "authenc-hmac-md5-"
2764 						   "cbc-des-caam",
2765 				.cra_blocksize = DES_BLOCK_SIZE,
2766 			},
2767 			.setkey = aead_setkey,
2768 			.setauthsize = aead_setauthsize,
2769 			.encrypt = aead_encrypt,
2770 			.decrypt = aead_decrypt,
2771 			.ivsize = DES_BLOCK_SIZE,
2772 			.maxauthsize = MD5_DIGEST_SIZE,
2773 		},
2774 		.caam = {
2775 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2776 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2777 					   OP_ALG_AAI_HMAC_PRECOMP,
2778 		},
2779 	},
2780 	{
2781 		.aead = {
2782 			.base = {
2783 				.cra_name = "echainiv(authenc(hmac(md5),"
2784 					    "cbc(des)))",
2785 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2786 						   "cbc-des-caam",
2787 				.cra_blocksize = DES_BLOCK_SIZE,
2788 			},
2789 			.setkey = aead_setkey,
2790 			.setauthsize = aead_setauthsize,
2791 			.encrypt = aead_encrypt,
2792 			.decrypt = aead_decrypt,
2793 			.ivsize = DES_BLOCK_SIZE,
2794 			.maxauthsize = MD5_DIGEST_SIZE,
2795 		},
2796 		.caam = {
2797 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2798 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2799 					   OP_ALG_AAI_HMAC_PRECOMP,
2800 			.geniv = true,
2801 		},
2802 	},
2803 	{
2804 		.aead = {
2805 			.base = {
2806 				.cra_name = "authenc(hmac(sha1),cbc(des))",
2807 				.cra_driver_name = "authenc-hmac-sha1-"
2808 						   "cbc-des-caam",
2809 				.cra_blocksize = DES_BLOCK_SIZE,
2810 			},
2811 			.setkey = aead_setkey,
2812 			.setauthsize = aead_setauthsize,
2813 			.encrypt = aead_encrypt,
2814 			.decrypt = aead_decrypt,
2815 			.ivsize = DES_BLOCK_SIZE,
2816 			.maxauthsize = SHA1_DIGEST_SIZE,
2817 		},
2818 		.caam = {
2819 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2820 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2821 					   OP_ALG_AAI_HMAC_PRECOMP,
2822 		},
2823 	},
2824 	{
2825 		.aead = {
2826 			.base = {
2827 				.cra_name = "echainiv(authenc(hmac(sha1),"
2828 					    "cbc(des)))",
2829 				.cra_driver_name = "echainiv-authenc-"
2830 						   "hmac-sha1-cbc-des-caam",
2831 				.cra_blocksize = DES_BLOCK_SIZE,
2832 			},
2833 			.setkey = aead_setkey,
2834 			.setauthsize = aead_setauthsize,
2835 			.encrypt = aead_encrypt,
2836 			.decrypt = aead_decrypt,
2837 			.ivsize = DES_BLOCK_SIZE,
2838 			.maxauthsize = SHA1_DIGEST_SIZE,
2839 		},
2840 		.caam = {
2841 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2842 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2843 					   OP_ALG_AAI_HMAC_PRECOMP,
2844 			.geniv = true,
2845 		},
2846 	},
2847 	{
2848 		.aead = {
2849 			.base = {
2850 				.cra_name = "authenc(hmac(sha224),cbc(des))",
2851 				.cra_driver_name = "authenc-hmac-sha224-"
2852 						   "cbc-des-caam",
2853 				.cra_blocksize = DES_BLOCK_SIZE,
2854 			},
2855 			.setkey = aead_setkey,
2856 			.setauthsize = aead_setauthsize,
2857 			.encrypt = aead_encrypt,
2858 			.decrypt = aead_decrypt,
2859 			.ivsize = DES_BLOCK_SIZE,
2860 			.maxauthsize = SHA224_DIGEST_SIZE,
2861 		},
2862 		.caam = {
2863 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2864 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2865 					   OP_ALG_AAI_HMAC_PRECOMP,
2866 		},
2867 	},
2868 	{
2869 		.aead = {
2870 			.base = {
2871 				.cra_name = "echainiv(authenc(hmac(sha224),"
2872 					    "cbc(des)))",
2873 				.cra_driver_name = "echainiv-authenc-"
2874 						   "hmac-sha224-cbc-des-caam",
2875 				.cra_blocksize = DES_BLOCK_SIZE,
2876 			},
2877 			.setkey = aead_setkey,
2878 			.setauthsize = aead_setauthsize,
2879 			.encrypt = aead_encrypt,
2880 			.decrypt = aead_decrypt,
2881 			.ivsize = DES_BLOCK_SIZE,
2882 			.maxauthsize = SHA224_DIGEST_SIZE,
2883 		},
2884 		.caam = {
2885 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2886 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2887 					   OP_ALG_AAI_HMAC_PRECOMP,
2888 			.geniv = true,
2889 		},
2890 	},
2891 	{
2892 		.aead = {
2893 			.base = {
2894 				.cra_name = "authenc(hmac(sha256),cbc(des))",
2895 				.cra_driver_name = "authenc-hmac-sha256-"
2896 						   "cbc-des-caam",
2897 				.cra_blocksize = DES_BLOCK_SIZE,
2898 			},
2899 			.setkey = aead_setkey,
2900 			.setauthsize = aead_setauthsize,
2901 			.encrypt = aead_encrypt,
2902 			.decrypt = aead_decrypt,
2903 			.ivsize = DES_BLOCK_SIZE,
2904 			.maxauthsize = SHA256_DIGEST_SIZE,
2905 		},
2906 		.caam = {
2907 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2908 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2909 					   OP_ALG_AAI_HMAC_PRECOMP,
2910 		},
2911 	},
2912 	{
2913 		.aead = {
2914 			.base = {
2915 				.cra_name = "echainiv(authenc(hmac(sha256),"
2916 					    "cbc(des)))",
2917 				.cra_driver_name = "echainiv-authenc-"
2918 						   "hmac-sha256-cbc-des-caam",
2919 				.cra_blocksize = DES_BLOCK_SIZE,
2920 			},
2921 			.setkey = aead_setkey,
2922 			.setauthsize = aead_setauthsize,
2923 			.encrypt = aead_encrypt,
2924 			.decrypt = aead_decrypt,
2925 			.ivsize = DES_BLOCK_SIZE,
2926 			.maxauthsize = SHA256_DIGEST_SIZE,
2927 		},
2928 		.caam = {
2929 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2930 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2931 					   OP_ALG_AAI_HMAC_PRECOMP,
2932 			.geniv = true,
2933 		},
2934 	},
2935 	{
2936 		.aead = {
2937 			.base = {
2938 				.cra_name = "authenc(hmac(sha384),cbc(des))",
2939 				.cra_driver_name = "authenc-hmac-sha384-"
2940 						   "cbc-des-caam",
2941 				.cra_blocksize = DES_BLOCK_SIZE,
2942 			},
2943 			.setkey = aead_setkey,
2944 			.setauthsize = aead_setauthsize,
2945 			.encrypt = aead_encrypt,
2946 			.decrypt = aead_decrypt,
2947 			.ivsize = DES_BLOCK_SIZE,
2948 			.maxauthsize = SHA384_DIGEST_SIZE,
2949 		},
2950 		.caam = {
2951 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2952 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2953 					   OP_ALG_AAI_HMAC_PRECOMP,
2954 		},
2955 	},
2956 	{
2957 		.aead = {
2958 			.base = {
2959 				.cra_name = "echainiv(authenc(hmac(sha384),"
2960 					    "cbc(des)))",
2961 				.cra_driver_name = "echainiv-authenc-"
2962 						   "hmac-sha384-cbc-des-caam",
2963 				.cra_blocksize = DES_BLOCK_SIZE,
2964 			},
2965 			.setkey = aead_setkey,
2966 			.setauthsize = aead_setauthsize,
2967 			.encrypt = aead_encrypt,
2968 			.decrypt = aead_decrypt,
2969 			.ivsize = DES_BLOCK_SIZE,
2970 			.maxauthsize = SHA384_DIGEST_SIZE,
2971 		},
2972 		.caam = {
2973 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2974 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2975 					   OP_ALG_AAI_HMAC_PRECOMP,
2976 			.geniv = true,
2977 		},
2978 	},
2979 	{
2980 		.aead = {
2981 			.base = {
2982 				.cra_name = "authenc(hmac(sha512),cbc(des))",
2983 				.cra_driver_name = "authenc-hmac-sha512-"
2984 						   "cbc-des-caam",
2985 				.cra_blocksize = DES_BLOCK_SIZE,
2986 			},
2987 			.setkey = aead_setkey,
2988 			.setauthsize = aead_setauthsize,
2989 			.encrypt = aead_encrypt,
2990 			.decrypt = aead_decrypt,
2991 			.ivsize = DES_BLOCK_SIZE,
2992 			.maxauthsize = SHA512_DIGEST_SIZE,
2993 		},
2994 		.caam = {
2995 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2996 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2997 					   OP_ALG_AAI_HMAC_PRECOMP,
2998 		},
2999 	},
3000 	{
3001 		.aead = {
3002 			.base = {
3003 				.cra_name = "echainiv(authenc(hmac(sha512),"
3004 					    "cbc(des)))",
3005 				.cra_driver_name = "echainiv-authenc-"
3006 						   "hmac-sha512-cbc-des-caam",
3007 				.cra_blocksize = DES_BLOCK_SIZE,
3008 			},
3009 			.setkey = aead_setkey,
3010 			.setauthsize = aead_setauthsize,
3011 			.encrypt = aead_encrypt,
3012 			.decrypt = aead_decrypt,
3013 			.ivsize = DES_BLOCK_SIZE,
3014 			.maxauthsize = SHA512_DIGEST_SIZE,
3015 		},
3016 		.caam = {
3017 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3018 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3019 					   OP_ALG_AAI_HMAC_PRECOMP,
3020 			.geniv = true,
3021 		},
3022 	},
3023 	{
3024 		.aead = {
3025 			.base = {
3026 				.cra_name = "authenc(hmac(md5),"
3027 					    "rfc3686(ctr(aes)))",
3028 				.cra_driver_name = "authenc-hmac-md5-"
3029 						   "rfc3686-ctr-aes-caam",
3030 				.cra_blocksize = 1,
3031 			},
3032 			.setkey = aead_setkey,
3033 			.setauthsize = aead_setauthsize,
3034 			.encrypt = aead_encrypt,
3035 			.decrypt = aead_decrypt,
3036 			.ivsize = CTR_RFC3686_IV_SIZE,
3037 			.maxauthsize = MD5_DIGEST_SIZE,
3038 		},
3039 		.caam = {
3040 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3041 					   OP_ALG_AAI_CTR_MOD128,
3042 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
3043 					   OP_ALG_AAI_HMAC_PRECOMP,
3044 			.rfc3686 = true,
3045 		},
3046 	},
3047 	{
3048 		.aead = {
3049 			.base = {
3050 				.cra_name = "seqiv(authenc("
3051 					    "hmac(md5),rfc3686(ctr(aes))))",
3052 				.cra_driver_name = "seqiv-authenc-hmac-md5-"
3053 						   "rfc3686-ctr-aes-caam",
3054 				.cra_blocksize = 1,
3055 			},
3056 			.setkey = aead_setkey,
3057 			.setauthsize = aead_setauthsize,
3058 			.encrypt = aead_encrypt,
3059 			.decrypt = aead_decrypt,
3060 			.ivsize = CTR_RFC3686_IV_SIZE,
3061 			.maxauthsize = MD5_DIGEST_SIZE,
3062 		},
3063 		.caam = {
3064 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3065 					   OP_ALG_AAI_CTR_MOD128,
3066 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
3067 					   OP_ALG_AAI_HMAC_PRECOMP,
3068 			.rfc3686 = true,
3069 			.geniv = true,
3070 		},
3071 	},
3072 	{
3073 		.aead = {
3074 			.base = {
3075 				.cra_name = "authenc(hmac(sha1),"
3076 					    "rfc3686(ctr(aes)))",
3077 				.cra_driver_name = "authenc-hmac-sha1-"
3078 						   "rfc3686-ctr-aes-caam",
3079 				.cra_blocksize = 1,
3080 			},
3081 			.setkey = aead_setkey,
3082 			.setauthsize = aead_setauthsize,
3083 			.encrypt = aead_encrypt,
3084 			.decrypt = aead_decrypt,
3085 			.ivsize = CTR_RFC3686_IV_SIZE,
3086 			.maxauthsize = SHA1_DIGEST_SIZE,
3087 		},
3088 		.caam = {
3089 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3090 					   OP_ALG_AAI_CTR_MOD128,
3091 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3092 					   OP_ALG_AAI_HMAC_PRECOMP,
3093 			.rfc3686 = true,
3094 		},
3095 	},
3096 	{
3097 		.aead = {
3098 			.base = {
3099 				.cra_name = "seqiv(authenc("
3100 					    "hmac(sha1),rfc3686(ctr(aes))))",
3101 				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
3102 						   "rfc3686-ctr-aes-caam",
3103 				.cra_blocksize = 1,
3104 			},
3105 			.setkey = aead_setkey,
3106 			.setauthsize = aead_setauthsize,
3107 			.encrypt = aead_encrypt,
3108 			.decrypt = aead_decrypt,
3109 			.ivsize = CTR_RFC3686_IV_SIZE,
3110 			.maxauthsize = SHA1_DIGEST_SIZE,
3111 		},
3112 		.caam = {
3113 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3114 					   OP_ALG_AAI_CTR_MOD128,
3115 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3116 					   OP_ALG_AAI_HMAC_PRECOMP,
3117 			.rfc3686 = true,
3118 			.geniv = true,
3119 		},
3120 	},
3121 	{
3122 		.aead = {
3123 			.base = {
3124 				.cra_name = "authenc(hmac(sha224),"
3125 					    "rfc3686(ctr(aes)))",
3126 				.cra_driver_name = "authenc-hmac-sha224-"
3127 						   "rfc3686-ctr-aes-caam",
3128 				.cra_blocksize = 1,
3129 			},
3130 			.setkey = aead_setkey,
3131 			.setauthsize = aead_setauthsize,
3132 			.encrypt = aead_encrypt,
3133 			.decrypt = aead_decrypt,
3134 			.ivsize = CTR_RFC3686_IV_SIZE,
3135 			.maxauthsize = SHA224_DIGEST_SIZE,
3136 		},
3137 		.caam = {
3138 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3139 					   OP_ALG_AAI_CTR_MOD128,
3140 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3141 					   OP_ALG_AAI_HMAC_PRECOMP,
3142 			.rfc3686 = true,
3143 		},
3144 	},
3145 	{
3146 		.aead = {
3147 			.base = {
3148 				.cra_name = "seqiv(authenc("
3149 					    "hmac(sha224),rfc3686(ctr(aes))))",
3150 				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
3151 						   "rfc3686-ctr-aes-caam",
3152 				.cra_blocksize = 1,
3153 			},
3154 			.setkey = aead_setkey,
3155 			.setauthsize = aead_setauthsize,
3156 			.encrypt = aead_encrypt,
3157 			.decrypt = aead_decrypt,
3158 			.ivsize = CTR_RFC3686_IV_SIZE,
3159 			.maxauthsize = SHA224_DIGEST_SIZE,
3160 		},
3161 		.caam = {
3162 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3163 					   OP_ALG_AAI_CTR_MOD128,
3164 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3165 					   OP_ALG_AAI_HMAC_PRECOMP,
3166 			.rfc3686 = true,
3167 			.geniv = true,
3168 		},
3169 	},
3170 	{
3171 		.aead = {
3172 			.base = {
3173 				.cra_name = "authenc(hmac(sha256),"
3174 					    "rfc3686(ctr(aes)))",
3175 				.cra_driver_name = "authenc-hmac-sha256-"
3176 						   "rfc3686-ctr-aes-caam",
3177 				.cra_blocksize = 1,
3178 			},
3179 			.setkey = aead_setkey,
3180 			.setauthsize = aead_setauthsize,
3181 			.encrypt = aead_encrypt,
3182 			.decrypt = aead_decrypt,
3183 			.ivsize = CTR_RFC3686_IV_SIZE,
3184 			.maxauthsize = SHA256_DIGEST_SIZE,
3185 		},
3186 		.caam = {
3187 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3188 					   OP_ALG_AAI_CTR_MOD128,
3189 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3190 					   OP_ALG_AAI_HMAC_PRECOMP,
3191 			.rfc3686 = true,
3192 		},
3193 	},
3194 	{
3195 		.aead = {
3196 			.base = {
3197 				.cra_name = "seqiv(authenc(hmac(sha256),"
3198 					    "rfc3686(ctr(aes))))",
3199 				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
3200 						   "rfc3686-ctr-aes-caam",
3201 				.cra_blocksize = 1,
3202 			},
3203 			.setkey = aead_setkey,
3204 			.setauthsize = aead_setauthsize,
3205 			.encrypt = aead_encrypt,
3206 			.decrypt = aead_decrypt,
3207 			.ivsize = CTR_RFC3686_IV_SIZE,
3208 			.maxauthsize = SHA256_DIGEST_SIZE,
3209 		},
3210 		.caam = {
3211 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3212 					   OP_ALG_AAI_CTR_MOD128,
3213 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3214 					   OP_ALG_AAI_HMAC_PRECOMP,
3215 			.rfc3686 = true,
3216 			.geniv = true,
3217 		},
3218 	},
3219 	{
3220 		.aead = {
3221 			.base = {
3222 				.cra_name = "authenc(hmac(sha384),"
3223 					    "rfc3686(ctr(aes)))",
3224 				.cra_driver_name = "authenc-hmac-sha384-"
3225 						   "rfc3686-ctr-aes-caam",
3226 				.cra_blocksize = 1,
3227 			},
3228 			.setkey = aead_setkey,
3229 			.setauthsize = aead_setauthsize,
3230 			.encrypt = aead_encrypt,
3231 			.decrypt = aead_decrypt,
3232 			.ivsize = CTR_RFC3686_IV_SIZE,
3233 			.maxauthsize = SHA384_DIGEST_SIZE,
3234 		},
3235 		.caam = {
3236 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3237 					   OP_ALG_AAI_CTR_MOD128,
3238 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3239 					   OP_ALG_AAI_HMAC_PRECOMP,
3240 			.rfc3686 = true,
3241 		},
3242 	},
3243 	{
3244 		.aead = {
3245 			.base = {
3246 				.cra_name = "seqiv(authenc(hmac(sha384),"
3247 					    "rfc3686(ctr(aes))))",
3248 				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
3249 						   "rfc3686-ctr-aes-caam",
3250 				.cra_blocksize = 1,
3251 			},
3252 			.setkey = aead_setkey,
3253 			.setauthsize = aead_setauthsize,
3254 			.encrypt = aead_encrypt,
3255 			.decrypt = aead_decrypt,
3256 			.ivsize = CTR_RFC3686_IV_SIZE,
3257 			.maxauthsize = SHA384_DIGEST_SIZE,
3258 		},
3259 		.caam = {
3260 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3261 					   OP_ALG_AAI_CTR_MOD128,
3262 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3263 					   OP_ALG_AAI_HMAC_PRECOMP,
3264 			.rfc3686 = true,
3265 			.geniv = true,
3266 		},
3267 	},
3268 	{
3269 		.aead = {
3270 			.base = {
3271 				.cra_name = "authenc(hmac(sha512),"
3272 					    "rfc3686(ctr(aes)))",
3273 				.cra_driver_name = "authenc-hmac-sha512-"
3274 						   "rfc3686-ctr-aes-caam",
3275 				.cra_blocksize = 1,
3276 			},
3277 			.setkey = aead_setkey,
3278 			.setauthsize = aead_setauthsize,
3279 			.encrypt = aead_encrypt,
3280 			.decrypt = aead_decrypt,
3281 			.ivsize = CTR_RFC3686_IV_SIZE,
3282 			.maxauthsize = SHA512_DIGEST_SIZE,
3283 		},
3284 		.caam = {
3285 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3286 					   OP_ALG_AAI_CTR_MOD128,
3287 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3288 					   OP_ALG_AAI_HMAC_PRECOMP,
3289 			.rfc3686 = true,
3290 		},
3291 	},
3292 	{
3293 		.aead = {
3294 			.base = {
3295 				.cra_name = "seqiv(authenc(hmac(sha512),"
3296 					    "rfc3686(ctr(aes))))",
3297 				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
3298 						   "rfc3686-ctr-aes-caam",
3299 				.cra_blocksize = 1,
3300 			},
3301 			.setkey = aead_setkey,
3302 			.setauthsize = aead_setauthsize,
3303 			.encrypt = aead_encrypt,
3304 			.decrypt = aead_decrypt,
3305 			.ivsize = CTR_RFC3686_IV_SIZE,
3306 			.maxauthsize = SHA512_DIGEST_SIZE,
3307 		},
3308 		.caam = {
3309 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3310 					   OP_ALG_AAI_CTR_MOD128,
3311 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3312 					   OP_ALG_AAI_HMAC_PRECOMP,
3313 			.rfc3686 = true,
3314 			.geniv = true,
3315 		},
3316 	},
3317 	{
3318 		.aead = {
3319 			.base = {
3320 				.cra_name = "rfc7539(chacha20,poly1305)",
3321 				.cra_driver_name = "rfc7539-chacha20-poly1305-"
3322 						   "caam",
3323 				.cra_blocksize = 1,
3324 			},
3325 			.setkey = chachapoly_setkey,
3326 			.setauthsize = chachapoly_setauthsize,
3327 			.encrypt = chachapoly_encrypt,
3328 			.decrypt = chachapoly_decrypt,
3329 			.ivsize = CHACHAPOLY_IV_SIZE,
3330 			.maxauthsize = POLY1305_DIGEST_SIZE,
3331 		},
3332 		.caam = {
3333 			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3334 					   OP_ALG_AAI_AEAD,
3335 			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3336 					   OP_ALG_AAI_AEAD,
3337 		},
3338 	},
3339 	{
3340 		.aead = {
3341 			.base = {
3342 				.cra_name = "rfc7539esp(chacha20,poly1305)",
3343 				.cra_driver_name = "rfc7539esp-chacha20-"
3344 						   "poly1305-caam",
3345 				.cra_blocksize = 1,
3346 			},
3347 			.setkey = chachapoly_setkey,
3348 			.setauthsize = chachapoly_setauthsize,
3349 			.encrypt = chachapoly_encrypt,
3350 			.decrypt = chachapoly_decrypt,
3351 			.ivsize = 8,
3352 			.maxauthsize = POLY1305_DIGEST_SIZE,
3353 		},
3354 		.caam = {
3355 			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3356 					   OP_ALG_AAI_AEAD,
3357 			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3358 					   OP_ALG_AAI_AEAD,
3359 		},
3360 	},
3361 };
3362 
/*
 * caam_init_common - per-tfm context setup shared by skcipher and AEAD tfms
 * @ctx: per-tfm context to initialize
 * @caam: per-algorithm entry carrying the class1/class2 OP_ALG templates
 * @uses_dkp: true when the tfm's setkey path has the device update the key
 *            material in the context, which requires bidirectional DMA
 *
 * Allocates a job ring for this tfm and DMA-maps the shared descriptors and
 * key area as one contiguous region. Returns 0 on success, negative errno
 * on failure (job ring allocation or DMA mapping error).
 */
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	/* On era >= 6 with DKP the device writes back into the context */
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	/*
	 * Map sh_desc_enc, sh_desc_dec and key in a single mapping; the
	 * mapped length is the offset of the first member that is NOT part
	 * of the mapped region (sh_desc_enc_dma). caam_exit_common() must
	 * unmap with the same length.
	 */
	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* Derive the individual bus addresses from the single mapping */
	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
3402 
3403 static int caam_cra_init(struct crypto_skcipher *tfm)
3404 {
3405 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3406 	struct caam_skcipher_alg *caam_alg =
3407 		container_of(alg, typeof(*caam_alg), skcipher);
3408 
3409 	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
3410 				false);
3411 }
3412 
3413 static int caam_aead_init(struct crypto_aead *tfm)
3414 {
3415 	struct aead_alg *alg = crypto_aead_alg(tfm);
3416 	struct caam_aead_alg *caam_alg =
3417 		 container_of(alg, struct caam_aead_alg, aead);
3418 	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3419 
3420 	return caam_init_common(ctx, &caam_alg->caam,
3421 				alg->setkey == aead_setkey);
3422 }
3423 
/*
 * Undo caam_init_common(): unmap the shared-descriptor/key region and
 * release the job ring.
 */
static void caam_exit_common(struct caam_ctx *ctx)
{
	/*
	 * The length here must mirror the dma_map_single_attrs() call in
	 * caam_init_common(): everything from sh_desc_enc up to (excluding)
	 * sh_desc_enc_dma.
	 */
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}
3431 
/* skcipher ->exit hook */
static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);

	caam_exit_common(ctx);
}
3436 
/* AEAD ->exit hook */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	caam_exit_common(ctx);
}
3441 
3442 static void __exit caam_algapi_exit(void)
3443 {
3444 	int i;
3445 
3446 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3447 		struct caam_aead_alg *t_alg = driver_aeads + i;
3448 
3449 		if (t_alg->registered)
3450 			crypto_unregister_aead(&t_alg->aead);
3451 	}
3452 
3453 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3454 		struct caam_skcipher_alg *t_alg = driver_algs + i;
3455 
3456 		if (t_alg->registered)
3457 			crypto_unregister_skcipher(&t_alg->skcipher);
3458 	}
3459 }
3460 
3461 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3462 {
3463 	struct skcipher_alg *alg = &t_alg->skcipher;
3464 
3465 	alg->base.cra_module = THIS_MODULE;
3466 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3467 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3468 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3469 
3470 	alg->init = caam_cra_init;
3471 	alg->exit = caam_cra_exit;
3472 }
3473 
3474 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3475 {
3476 	struct aead_alg *alg = &t_alg->aead;
3477 
3478 	alg->base.cra_module = THIS_MODULE;
3479 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3480 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3481 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3482 
3483 	alg->init = caam_aead_init;
3484 	alg->exit = caam_aead_exit;
3485 }
3486 
/*
 * caam_algapi_init - module init: detect which CHAs (crypto hardware
 * accelerators) the CAAM instance provides, then register only the
 * skcipher/AEAD algorithms the device can actually run.
 *
 * NOTE(review): a crypto_register_*() failure only logs and continues, but
 * err is not reset afterwards — if the *last* attempted registration fails,
 * that error is returned even though earlier algorithms registered fine.
 * Confirm whether this is intended before relying on the return value.
 */
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
	u32 arc4_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false, gcm_support;

	/* Locate the CAAM controller node; two compatible spellings exist */
	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	priv = dev_get_drvdata(&pdev->dev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv) {
		err = -ENODEV;
		goto out_put_dev;
	}


	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	if (priv->era < 10) {
		/* Pre-era-10: capabilities packed into the perfmon CHA regs */
		u32 cha_vid, cha_inst, aes_rn;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
		arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >>
			    CHA_ID_LS_ARC4_SHIFT;
		/* ChaCha/Poly1305 accelerators do not exist before era 10 */
		ccha_inst = 0;
		ptha_inst = 0;

		/* GCM is unsupported on low-power AES with revision < 8 */
		aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
			 CHA_ID_LS_AES_MASK;
		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
	} else {
		/* Era >= 10: each CHA has its own version register */
		u32 aesa, mdha;

		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
		ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
		ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
		arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK;

		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
	}

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid  == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	/* Register skciphers, skipping any the detected hardware lacks */
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
				continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
				continue;

		/* Skip ARC4 algorithms if not supported by device */
		if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4)
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		    OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			/* Non-fatal: log and move on to the next algorithm */
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	/* Register AEADs, applying the same hardware-capability filters */
	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
				continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
				continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/* Skip GCM algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			/* Non-fatal: log and move on to the next algorithm */
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

out_put_dev:
	/* Drop the reference taken by of_find_device_by_node() */
	put_device(&pdev->dev);
	return err;
}
3670 
3671 module_init(caam_algapi_init);
3672 module_exit(caam_algapi_exit);
3673 
3674 MODULE_LICENSE("GPL");
3675 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
3676 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
3677