1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * caam - Freescale FSL CAAM support for crypto API
4  *
5  * Copyright 2008-2011 Freescale Semiconductor, Inc.
6  * Copyright 2016-2019 NXP
7  *
8  * Based on talitos crypto API driver.
9  *
10  * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11  *
12  * ---------------                     ---------------
13  * | JobDesc #1  |-------------------->|  ShareDesc  |
14  * | *(packet 1) |                     |   (PDB)     |
15  * ---------------      |------------->|  (hashKey)  |
16  *       .              |              | (cipherKey) |
17  *       .              |    |-------->| (operation) |
18  * ---------------      |    |         ---------------
19  * | JobDesc #2  |------|    |
20  * | *(packet 2) |           |
21  * ---------------           |
22  *       .                   |
23  *       .                   |
24  * ---------------           |
25  * | JobDesc #3  |------------
26  * | *(packet 3) |
27  * ---------------
28  *
29  * The SharedDesc never changes for a connection unless rekeyed, but
30  * each packet will likely be in a different place. So all we need
31  * to know to process the packet is where the input is, where the
32  * output goes, and what context we want to process with. Context is
33  * in the SharedDesc, packet references in the JobDesc.
34  *
35  * So, a job desc looks like:
36  *
37  * ---------------------
38  * | Header            |
39  * | ShareDesc Pointer |
40  * | SEQ_OUT_PTR       |
41  * | (output buffer)   |
42  * | (output length)   |
43  * | SEQ_IN_PTR        |
44  * | (input buffer)    |
45  * | (input length)    |
46  * ---------------------
47  */
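
/*
 * Illustrative sketch (not part of the driver): building such a job
 * descriptor with the inline helpers from desc_constr.h, mirroring the
 * layout above. The DMA addresses and lengths here are hypothetical.
 *
 *	u32 jd[MAX_CAAM_DESCSIZE];
 *
 *	init_job_desc_shared(jd, sh_desc_dma, desc_len(sh_desc),
 *			     HDR_SHARE_DEFER | HDR_REVERSE);
 *	append_seq_out_ptr(jd, dst_dma, out_len, 0);
 *	append_seq_in_ptr(jd, src_dma, in_len, 0);
 *
 * caam_jr_enqueue() would then hand jd to a job ring, which is how the
 * init_*_job() helpers below are used.
 */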
48 
49 #include "compat.h"
50 
51 #include "regs.h"
52 #include "intern.h"
53 #include "desc_constr.h"
54 #include "jr.h"
55 #include "error.h"
56 #include "sg_sw_sec4.h"
57 #include "key_gen.h"
58 #include "caamalg_desc.h"
59 #include <crypto/engine.h>
60 
61 /*
62  * crypto alg
63  */
64 #define CAAM_CRA_PRIORITY		3000
/*
 * max key is the sum of AES_MAX_KEY_SIZE, CTR_RFC3686_NONCE_SIZE and
 * the max split key size
 */
66 #define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
67 					 CTR_RFC3686_NONCE_SIZE + \
68 					 SHA512_DIGEST_SIZE * 2)
69 
70 #define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
71 #define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
72 					 CAAM_CMD_SZ * 4)
73 #define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
74 					 CAAM_CMD_SZ * 5)
75 
76 #define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)
77 
78 #define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
79 #define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
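
/*
 * Worked example for the bound above (illustrative): with AES-256 +
 * HMAC-SHA512 in RFC3686 mode, CAAM_MAX_KEY_SIZE = 32 (AES key) +
 * 4 (nonce) + 2 * 64 (ipad/opad halves of the split key) = 164 bytes.
 */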
80 
81 struct caam_alg_entry {
82 	int class1_alg_type;
83 	int class2_alg_type;
84 	bool rfc3686;
85 	bool geniv;
86 	bool nodkp;
87 };
88 
89 struct caam_aead_alg {
90 	struct aead_alg aead;
91 	struct caam_alg_entry caam;
92 	bool registered;
93 };
94 
95 struct caam_skcipher_alg {
96 	struct skcipher_alg skcipher;
97 	struct caam_alg_entry caam;
98 	bool registered;
99 };
100 
101 /*
102  * per-session context
103  */
104 struct caam_ctx {
105 	struct crypto_engine_ctx enginectx;
106 	u32 sh_desc_enc[DESC_MAX_USED_LEN];
107 	u32 sh_desc_dec[DESC_MAX_USED_LEN];
108 	u8 key[CAAM_MAX_KEY_SIZE];
109 	dma_addr_t sh_desc_enc_dma;
110 	dma_addr_t sh_desc_dec_dma;
111 	dma_addr_t key_dma;
112 	enum dma_data_direction dir;
113 	struct device *jrdev;
114 	struct alginfo adata;
115 	struct alginfo cdata;
116 	unsigned int authsize;
117 };
118 
119 struct caam_skcipher_req_ctx {
120 	struct skcipher_edesc *edesc;
121 };
122 
123 struct caam_aead_req_ctx {
124 	struct aead_edesc *edesc;
125 };
126 
127 static int aead_null_set_sh_desc(struct crypto_aead *aead)
128 {
129 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
130 	struct device *jrdev = ctx->jrdev;
131 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
132 	u32 *desc;
133 	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
134 			ctx->adata.keylen_pad;
135 
136 	/*
137 	 * Job Descriptor and Shared Descriptors
138 	 * must all fit into the 64-word Descriptor h/w Buffer
139 	 */
140 	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
141 		ctx->adata.key_inline = true;
142 		ctx->adata.key_virt = ctx->key;
143 	} else {
144 		ctx->adata.key_inline = false;
145 		ctx->adata.key_dma = ctx->key_dma;
146 	}
147 
148 	/* aead_encrypt shared descriptor */
149 	desc = ctx->sh_desc_enc;
150 	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
151 				    ctrlpriv->era);
152 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
153 				   desc_bytes(desc), ctx->dir);
154 
155 	/*
156 	 * Job Descriptor and Shared Descriptors
157 	 * must all fit into the 64-word Descriptor h/w Buffer
158 	 */
159 	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
160 		ctx->adata.key_inline = true;
161 		ctx->adata.key_virt = ctx->key;
162 	} else {
163 		ctx->adata.key_inline = false;
164 		ctx->adata.key_dma = ctx->key_dma;
165 	}
166 
167 	/* aead_decrypt shared descriptor */
168 	desc = ctx->sh_desc_dec;
169 	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
170 				    ctrlpriv->era);
171 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
172 				   desc_bytes(desc), ctx->dir);
173 
174 	return 0;
175 }
176 
177 static int aead_set_sh_desc(struct crypto_aead *aead)
178 {
179 	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
180 						 struct caam_aead_alg, aead);
181 	unsigned int ivsize = crypto_aead_ivsize(aead);
182 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
183 	struct device *jrdev = ctx->jrdev;
184 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
185 	u32 ctx1_iv_off = 0;
186 	u32 *desc, *nonce = NULL;
187 	u32 inl_mask;
188 	unsigned int data_len[2];
189 	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
190 			       OP_ALG_AAI_CTR_MOD128);
191 	const bool is_rfc3686 = alg->caam.rfc3686;
192 
193 	if (!ctx->authsize)
194 		return 0;
195 
196 	/* NULL encryption / decryption */
197 	if (!ctx->cdata.keylen)
198 		return aead_null_set_sh_desc(aead);
199 
200 	/*
201 	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
203 	 * CONTEXT1[255:128] = IV
204 	 */
205 	if (ctr_mode)
206 		ctx1_iv_off = 16;
207 
208 	/*
209 	 * RFC3686 specific:
210 	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
211 	 */
212 	if (is_rfc3686) {
213 		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
214 		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
215 				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
216 	}
217 
218 	/*
219 	 * In case |user key| > |derived key|, using DKP<imm,imm>
220 	 * would result in invalid opcodes (last bytes of user key) in
221 	 * the resulting descriptor. Use DKP<ptr,imm> instead => both
222 	 * virtual and dma key addresses are needed.
223 	 */
224 	ctx->adata.key_virt = ctx->key;
225 	ctx->adata.key_dma = ctx->key_dma;
226 
227 	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
228 	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
229 
230 	data_len[0] = ctx->adata.keylen_pad;
231 	data_len[1] = ctx->cdata.keylen;
232 
233 	if (alg->caam.geniv)
234 		goto skip_enc;
235 
236 	/*
237 	 * Job Descriptor and Shared Descriptors
238 	 * must all fit into the 64-word Descriptor h/w Buffer
239 	 */
240 	if (desc_inline_query(DESC_AEAD_ENC_LEN +
241 			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
242 			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
243 			      ARRAY_SIZE(data_len)) < 0)
244 		return -EINVAL;
245 
246 	ctx->adata.key_inline = !!(inl_mask & 1);
247 	ctx->cdata.key_inline = !!(inl_mask & 2);
248 
249 	/* aead_encrypt shared descriptor */
250 	desc = ctx->sh_desc_enc;
251 	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
252 			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
253 			       false, ctrlpriv->era);
254 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
255 				   desc_bytes(desc), ctx->dir);
256 
257 skip_enc:
258 	/*
259 	 * Job Descriptor and Shared Descriptors
260 	 * must all fit into the 64-word Descriptor h/w Buffer
261 	 */
262 	if (desc_inline_query(DESC_AEAD_DEC_LEN +
263 			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
264 			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
265 			      ARRAY_SIZE(data_len)) < 0)
266 		return -EINVAL;
267 
268 	ctx->adata.key_inline = !!(inl_mask & 1);
269 	ctx->cdata.key_inline = !!(inl_mask & 2);
270 
271 	/* aead_decrypt shared descriptor */
272 	desc = ctx->sh_desc_dec;
273 	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
274 			       ctx->authsize, alg->caam.geniv, is_rfc3686,
275 			       nonce, ctx1_iv_off, false, ctrlpriv->era);
276 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
277 				   desc_bytes(desc), ctx->dir);
278 
279 	if (!alg->caam.geniv)
280 		goto skip_givenc;
281 
282 	/*
283 	 * Job Descriptor and Shared Descriptors
284 	 * must all fit into the 64-word Descriptor h/w Buffer
285 	 */
286 	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
287 			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
288 			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
289 			      ARRAY_SIZE(data_len)) < 0)
290 		return -EINVAL;
291 
292 	ctx->adata.key_inline = !!(inl_mask & 1);
293 	ctx->cdata.key_inline = !!(inl_mask & 2);
294 
295 	/* aead_givencrypt shared descriptor */
296 	desc = ctx->sh_desc_enc;
297 	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
298 				  ctx->authsize, is_rfc3686, nonce,
299 				  ctx1_iv_off, false, ctrlpriv->era);
300 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
301 				   desc_bytes(desc), ctx->dir);
302 
303 skip_givenc:
304 	return 0;
305 }
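
/*
 * Illustrative example of the desc_inline_query() calls above
 * (hypothetical sizes): with data_len = { 64 (padded split key),
 * 32 (AES key) }, bit 0 of inl_mask is set if the split key still fits
 * in the 64-word buffer alongside the descriptor commands, and bit 1 if
 * the AES key also fits; a key that does not fit is referenced through
 * its DMA address instead of being copied into the shared descriptor.
 */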
306 
307 static int aead_setauthsize(struct crypto_aead *authenc,
308 				    unsigned int authsize)
309 {
310 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
311 
312 	ctx->authsize = authsize;
313 	aead_set_sh_desc(authenc);
314 
315 	return 0;
316 }
317 
318 static int gcm_set_sh_desc(struct crypto_aead *aead)
319 {
320 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
321 	struct device *jrdev = ctx->jrdev;
322 	unsigned int ivsize = crypto_aead_ivsize(aead);
323 	u32 *desc;
324 	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
325 			ctx->cdata.keylen;
326 
327 	if (!ctx->cdata.keylen || !ctx->authsize)
328 		return 0;
329 
330 	/*
331 	 * AES GCM encrypt shared descriptor
332 	 * Job Descriptor and Shared Descriptor
333 	 * must fit into the 64-word Descriptor h/w Buffer
334 	 */
335 	if (rem_bytes >= DESC_GCM_ENC_LEN) {
336 		ctx->cdata.key_inline = true;
337 		ctx->cdata.key_virt = ctx->key;
338 	} else {
339 		ctx->cdata.key_inline = false;
340 		ctx->cdata.key_dma = ctx->key_dma;
341 	}
342 
343 	desc = ctx->sh_desc_enc;
344 	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
345 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
346 				   desc_bytes(desc), ctx->dir);
347 
348 	/*
349 	 * Job Descriptor and Shared Descriptors
350 	 * must all fit into the 64-word Descriptor h/w Buffer
351 	 */
352 	if (rem_bytes >= DESC_GCM_DEC_LEN) {
353 		ctx->cdata.key_inline = true;
354 		ctx->cdata.key_virt = ctx->key;
355 	} else {
356 		ctx->cdata.key_inline = false;
357 		ctx->cdata.key_dma = ctx->key_dma;
358 	}
359 
360 	desc = ctx->sh_desc_dec;
361 	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
362 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
363 				   desc_bytes(desc), ctx->dir);
364 
365 	return 0;
366 }
367 
368 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
369 {
370 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
371 	int err;
372 
373 	err = crypto_gcm_check_authsize(authsize);
374 	if (err)
375 		return err;
376 
377 	ctx->authsize = authsize;
378 	gcm_set_sh_desc(authenc);
379 
380 	return 0;
381 }
382 
383 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
384 {
385 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
386 	struct device *jrdev = ctx->jrdev;
387 	unsigned int ivsize = crypto_aead_ivsize(aead);
388 	u32 *desc;
389 	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
390 			ctx->cdata.keylen;
391 
392 	if (!ctx->cdata.keylen || !ctx->authsize)
393 		return 0;
394 
395 	/*
396 	 * RFC4106 encrypt shared descriptor
397 	 * Job Descriptor and Shared Descriptor
398 	 * must fit into the 64-word Descriptor h/w Buffer
399 	 */
400 	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
401 		ctx->cdata.key_inline = true;
402 		ctx->cdata.key_virt = ctx->key;
403 	} else {
404 		ctx->cdata.key_inline = false;
405 		ctx->cdata.key_dma = ctx->key_dma;
406 	}
407 
408 	desc = ctx->sh_desc_enc;
409 	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
410 				  false);
411 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
412 				   desc_bytes(desc), ctx->dir);
413 
414 	/*
415 	 * Job Descriptor and Shared Descriptors
416 	 * must all fit into the 64-word Descriptor h/w Buffer
417 	 */
418 	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
419 		ctx->cdata.key_inline = true;
420 		ctx->cdata.key_virt = ctx->key;
421 	} else {
422 		ctx->cdata.key_inline = false;
423 		ctx->cdata.key_dma = ctx->key_dma;
424 	}
425 
426 	desc = ctx->sh_desc_dec;
427 	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
428 				  false);
429 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
430 				   desc_bytes(desc), ctx->dir);
431 
432 	return 0;
433 }
434 
435 static int rfc4106_setauthsize(struct crypto_aead *authenc,
436 			       unsigned int authsize)
437 {
438 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
439 	int err;
440 
441 	err = crypto_rfc4106_check_authsize(authsize);
442 	if (err)
443 		return err;
444 
445 	ctx->authsize = authsize;
446 	rfc4106_set_sh_desc(authenc);
447 
448 	return 0;
449 }
450 
451 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
452 {
453 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
454 	struct device *jrdev = ctx->jrdev;
455 	unsigned int ivsize = crypto_aead_ivsize(aead);
456 	u32 *desc;
457 	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
458 			ctx->cdata.keylen;
459 
460 	if (!ctx->cdata.keylen || !ctx->authsize)
461 		return 0;
462 
463 	/*
464 	 * RFC4543 encrypt shared descriptor
465 	 * Job Descriptor and Shared Descriptor
466 	 * must fit into the 64-word Descriptor h/w Buffer
467 	 */
468 	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
469 		ctx->cdata.key_inline = true;
470 		ctx->cdata.key_virt = ctx->key;
471 	} else {
472 		ctx->cdata.key_inline = false;
473 		ctx->cdata.key_dma = ctx->key_dma;
474 	}
475 
476 	desc = ctx->sh_desc_enc;
477 	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
478 				  false);
479 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
480 				   desc_bytes(desc), ctx->dir);
481 
482 	/*
483 	 * Job Descriptor and Shared Descriptors
484 	 * must all fit into the 64-word Descriptor h/w Buffer
485 	 */
486 	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
487 		ctx->cdata.key_inline = true;
488 		ctx->cdata.key_virt = ctx->key;
489 	} else {
490 		ctx->cdata.key_inline = false;
491 		ctx->cdata.key_dma = ctx->key_dma;
492 	}
493 
494 	desc = ctx->sh_desc_dec;
495 	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
496 				  false);
497 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
498 				   desc_bytes(desc), ctx->dir);
499 
500 	return 0;
501 }
502 
503 static int rfc4543_setauthsize(struct crypto_aead *authenc,
504 			       unsigned int authsize)
505 {
506 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
507 
508 	if (authsize != 16)
509 		return -EINVAL;
510 
511 	ctx->authsize = authsize;
512 	rfc4543_set_sh_desc(authenc);
513 
514 	return 0;
515 }
516 
517 static int chachapoly_set_sh_desc(struct crypto_aead *aead)
518 {
519 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
520 	struct device *jrdev = ctx->jrdev;
521 	unsigned int ivsize = crypto_aead_ivsize(aead);
522 	u32 *desc;
523 
524 	if (!ctx->cdata.keylen || !ctx->authsize)
525 		return 0;
526 
527 	desc = ctx->sh_desc_enc;
528 	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
529 			       ctx->authsize, true, false);
530 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
531 				   desc_bytes(desc), ctx->dir);
532 
533 	desc = ctx->sh_desc_dec;
534 	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
535 			       ctx->authsize, false, false);
536 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
537 				   desc_bytes(desc), ctx->dir);
538 
539 	return 0;
540 }
541 
542 static int chachapoly_setauthsize(struct crypto_aead *aead,
543 				  unsigned int authsize)
544 {
545 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
546 
547 	if (authsize != POLY1305_DIGEST_SIZE)
548 		return -EINVAL;
549 
550 	ctx->authsize = authsize;
551 	return chachapoly_set_sh_desc(aead);
552 }
553 
554 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
555 			     unsigned int keylen)
556 {
557 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
558 	unsigned int ivsize = crypto_aead_ivsize(aead);
559 	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
560 
561 	if (keylen != CHACHA_KEY_SIZE + saltlen)
562 		return -EINVAL;
563 
564 	ctx->cdata.key_virt = key;
565 	ctx->cdata.keylen = keylen - saltlen;
566 
567 	return chachapoly_set_sh_desc(aead);
568 }
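
/*
 * Example key layout handled above (rfc7539esp, assuming an 8-byte IV):
 * keylen = CHACHA_KEY_SIZE + (CHACHAPOLY_IV_SIZE - 8) = 36 bytes, so the
 * trailing 4 bytes are the salt kept next to the nonce and cdata.keylen
 * is trimmed back to the 32-byte ChaCha20 key.
 */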
569 
570 static int aead_setkey(struct crypto_aead *aead,
571 			       const u8 *key, unsigned int keylen)
572 {
573 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
574 	struct device *jrdev = ctx->jrdev;
575 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
576 	struct crypto_authenc_keys keys;
577 	int ret = 0;
578 
579 	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
580 		goto badkey;
581 
582 	dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
583 	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
584 	       keys.authkeylen);
585 	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
586 			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
587 
588 	/*
589 	 * If DKP is supported, use it in the shared descriptor to generate
590 	 * the split key.
591 	 */
592 	if (ctrlpriv->era >= 6) {
593 		ctx->adata.keylen = keys.authkeylen;
594 		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
595 						      OP_ALG_ALGSEL_MASK);
596 
597 		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
598 			goto badkey;
599 
600 		memcpy(ctx->key, keys.authkey, keys.authkeylen);
601 		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
602 		       keys.enckeylen);
603 		dma_sync_single_for_device(jrdev, ctx->key_dma,
604 					   ctx->adata.keylen_pad +
605 					   keys.enckeylen, ctx->dir);
606 		goto skip_split_key;
607 	}
608 
609 	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
610 			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
611 			    keys.enckeylen);
	if (ret)
		goto badkey;
615 
	/* append encryption key after the auth split key */
617 	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
618 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
619 				   keys.enckeylen, ctx->dir);
620 
621 	print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
622 			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
623 			     ctx->adata.keylen_pad + keys.enckeylen, 1);
624 
625 skip_split_key:
626 	ctx->cdata.keylen = keys.enckeylen;
627 	memzero_explicit(&keys, sizeof(keys));
628 	return aead_set_sh_desc(aead);
629 badkey:
630 	memzero_explicit(&keys, sizeof(keys));
631 	return -EINVAL;
632 }
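
/*
 * Sketch of the key layout built above (sizes are examples): for
 * authenc(hmac(sha256),cbc(aes)) with a 16-byte AES key, ctx->key holds
 *
 *	[ split key (ipad | opad), padded to adata.keylen_pad ][ AES key ]
 *
 * so the cipher key starts at ctx->key + adata.keylen_pad, matching the
 * cdata.key_virt/key_dma setup in aead_set_sh_desc().
 */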
633 
634 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
635 			    unsigned int keylen)
636 {
637 	struct crypto_authenc_keys keys;
638 	int err;
639 
640 	err = crypto_authenc_extractkeys(&keys, key, keylen);
641 	if (unlikely(err))
642 		return err;
643 
644 	err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
645 	      aead_setkey(aead, key, keylen);
646 
647 	memzero_explicit(&keys, sizeof(keys));
648 	return err;
649 }
650 
651 static int gcm_setkey(struct crypto_aead *aead,
652 		      const u8 *key, unsigned int keylen)
653 {
654 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
655 	struct device *jrdev = ctx->jrdev;
656 	int err;
657 
658 	err = aes_check_keylen(keylen);
659 	if (err)
660 		return err;
661 
662 	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
663 			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
664 
665 	memcpy(ctx->key, key, keylen);
666 	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
667 	ctx->cdata.keylen = keylen;
668 
669 	return gcm_set_sh_desc(aead);
670 }
671 
672 static int rfc4106_setkey(struct crypto_aead *aead,
673 			  const u8 *key, unsigned int keylen)
674 {
675 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
676 	struct device *jrdev = ctx->jrdev;
677 	int err;
678 
679 	err = aes_check_keylen(keylen - 4);
680 	if (err)
681 		return err;
682 
683 	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
684 			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
685 
686 	memcpy(ctx->key, key, keylen);
687 
688 	/*
689 	 * The last four bytes of the key material are used as the salt value
690 	 * in the nonce. Update the AES key length.
691 	 */
692 	ctx->cdata.keylen = keylen - 4;
693 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
694 				   ctx->dir);
695 	return rfc4106_set_sh_desc(aead);
696 }
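
/*
 * Example (illustrative): an rfc4106 key blob for AES-128 is 20 bytes,
 * i.e. a 16-byte AES key followed by the 4-byte salt; only the AES key
 * counts toward cdata.keylen, while the salt is appended after the IV in
 * the job descriptor (see init_gcm_job()).
 */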
697 
698 static int rfc4543_setkey(struct crypto_aead *aead,
699 			  const u8 *key, unsigned int keylen)
700 {
701 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
702 	struct device *jrdev = ctx->jrdev;
703 	int err;
704 
705 	err = aes_check_keylen(keylen - 4);
706 	if (err)
707 		return err;
708 
709 	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
710 			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
711 
712 	memcpy(ctx->key, key, keylen);
713 
714 	/*
715 	 * The last four bytes of the key material are used as the salt value
716 	 * in the nonce. Update the AES key length.
717 	 */
718 	ctx->cdata.keylen = keylen - 4;
719 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
720 				   ctx->dir);
721 	return rfc4543_set_sh_desc(aead);
722 }
723 
724 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
725 			   unsigned int keylen, const u32 ctx1_iv_off)
726 {
727 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
728 	struct caam_skcipher_alg *alg =
729 		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
730 			     skcipher);
731 	struct device *jrdev = ctx->jrdev;
732 	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
733 	u32 *desc;
734 	const bool is_rfc3686 = alg->caam.rfc3686;
735 
736 	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
737 			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
738 
739 	ctx->cdata.keylen = keylen;
740 	ctx->cdata.key_virt = key;
741 	ctx->cdata.key_inline = true;
742 
743 	/* skcipher_encrypt shared descriptor */
744 	desc = ctx->sh_desc_enc;
745 	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
746 				   ctx1_iv_off);
747 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
748 				   desc_bytes(desc), ctx->dir);
749 
750 	/* skcipher_decrypt shared descriptor */
751 	desc = ctx->sh_desc_dec;
752 	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
753 				   ctx1_iv_off);
754 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
755 				   desc_bytes(desc), ctx->dir);
756 
757 	return 0;
758 }
759 
760 static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
761 			       const u8 *key, unsigned int keylen)
762 {
763 	int err;
764 
765 	err = aes_check_keylen(keylen);
766 	if (err)
767 		return err;
768 
769 	return skcipher_setkey(skcipher, key, keylen, 0);
770 }
771 
772 static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
773 				   const u8 *key, unsigned int keylen)
774 {
775 	u32 ctx1_iv_off;
776 	int err;
777 
778 	/*
779 	 * RFC3686 specific:
780 	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
781 	 *	| *key = {KEY, NONCE}
782 	 */
783 	ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
784 	keylen -= CTR_RFC3686_NONCE_SIZE;
785 
786 	err = aes_check_keylen(keylen);
787 	if (err)
788 		return err;
789 
790 	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
791 }
792 
793 static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
794 			       const u8 *key, unsigned int keylen)
795 {
796 	u32 ctx1_iv_off;
797 	int err;
798 
799 	/*
800 	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
802 	 * CONTEXT1[255:128] = IV
803 	 */
804 	ctx1_iv_off = 16;
805 
806 	err = aes_check_keylen(keylen);
807 	if (err)
808 		return err;
809 
810 	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
811 }
812 
813 static int arc4_skcipher_setkey(struct crypto_skcipher *skcipher,
814 				const u8 *key, unsigned int keylen)
815 {
816 	return skcipher_setkey(skcipher, key, keylen, 0);
817 }
818 
819 static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
820 			       const u8 *key, unsigned int keylen)
821 {
822 	return verify_skcipher_des_key(skcipher, key) ?:
823 	       skcipher_setkey(skcipher, key, keylen, 0);
824 }
825 
826 static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
827 				const u8 *key, unsigned int keylen)
828 {
829 	return verify_skcipher_des3_key(skcipher, key) ?:
830 	       skcipher_setkey(skcipher, key, keylen, 0);
831 }
832 
833 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
834 			       unsigned int keylen)
835 {
836 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
837 	struct device *jrdev = ctx->jrdev;
838 	u32 *desc;
839 
	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
841 		dev_err(jrdev, "key size mismatch\n");
842 		return -EINVAL;
843 	}
844 
845 	ctx->cdata.keylen = keylen;
846 	ctx->cdata.key_virt = key;
847 	ctx->cdata.key_inline = true;
848 
849 	/* xts_skcipher_encrypt shared descriptor */
850 	desc = ctx->sh_desc_enc;
851 	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
852 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
853 				   desc_bytes(desc), ctx->dir);
854 
855 	/* xts_skcipher_decrypt shared descriptor */
856 	desc = ctx->sh_desc_dec;
857 	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
858 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
859 				   desc_bytes(desc), ctx->dir);
860 
861 	return 0;
862 }
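
/*
 * Example: XTS key material is two concatenated AES keys of equal size,
 * so the check above only accepts 32 bytes (2 x AES-128) or 64 bytes
 * (2 x AES-256); AES-192 XTS is not supported by this driver.
 */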
863 
864 /*
865  * aead_edesc - s/w-extended aead descriptor
866  * @src_nents: number of segments in input s/w scatterlist
867  * @dst_nents: number of segments in output s/w scatterlist
868  * @mapped_src_nents: number of segments in input h/w link table
869  * @mapped_dst_nents: number of segments in output h/w link table
870  * @sec4_sg_bytes: length of dma mapped sec4_sg space
871  * @bklog: stored to determine if the request needs backlog
872  * @sec4_sg_dma: bus physical mapped address of h/w link table
873  * @sec4_sg: pointer to h/w link table
874  * @hw_desc: the h/w job descriptor followed by any referenced link tables
875  */
876 struct aead_edesc {
877 	int src_nents;
878 	int dst_nents;
879 	int mapped_src_nents;
880 	int mapped_dst_nents;
881 	int sec4_sg_bytes;
882 	bool bklog;
883 	dma_addr_t sec4_sg_dma;
884 	struct sec4_sg_entry *sec4_sg;
885 	u32 hw_desc[];
886 };
887 
888 /*
889  * skcipher_edesc - s/w-extended skcipher descriptor
890  * @src_nents: number of segments in input s/w scatterlist
891  * @dst_nents: number of segments in output s/w scatterlist
892  * @mapped_src_nents: number of segments in input h/w link table
893  * @mapped_dst_nents: number of segments in output h/w link table
894  * @iv_dma: dma address of iv for checking continuity and link table
895  * @sec4_sg_bytes: length of dma mapped sec4_sg space
896  * @bklog: stored to determine if the request needs backlog
897  * @sec4_sg_dma: bus physical mapped address of h/w link table
898  * @sec4_sg: pointer to h/w link table
899  * @hw_desc: the h/w job descriptor followed by any referenced link tables
900  *	     and IV
901  */
902 struct skcipher_edesc {
903 	int src_nents;
904 	int dst_nents;
905 	int mapped_src_nents;
906 	int mapped_dst_nents;
907 	dma_addr_t iv_dma;
908 	int sec4_sg_bytes;
909 	bool bklog;
910 	dma_addr_t sec4_sg_dma;
911 	struct sec4_sg_entry *sec4_sg;
912 	u32 hw_desc[];
913 };
914 
915 static void caam_unmap(struct device *dev, struct scatterlist *src,
916 		       struct scatterlist *dst, int src_nents,
917 		       int dst_nents,
918 		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
919 		       int sec4_sg_bytes)
920 {
921 	if (dst != src) {
922 		if (src_nents)
923 			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
924 		if (dst_nents)
925 			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
926 	} else {
927 		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
928 	}
929 
930 	if (iv_dma)
931 		dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
932 	if (sec4_sg_bytes)
933 		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
934 				 DMA_TO_DEVICE);
935 }
936 
937 static void aead_unmap(struct device *dev,
938 		       struct aead_edesc *edesc,
939 		       struct aead_request *req)
940 {
941 	caam_unmap(dev, req->src, req->dst,
942 		   edesc->src_nents, edesc->dst_nents, 0, 0,
943 		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
944 }
945 
946 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
947 			   struct skcipher_request *req)
948 {
949 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
950 	int ivsize = crypto_skcipher_ivsize(skcipher);
951 
952 	caam_unmap(dev, req->src, req->dst,
953 		   edesc->src_nents, edesc->dst_nents,
954 		   edesc->iv_dma, ivsize,
955 		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
956 }
957 
958 static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
959 			    void *context)
960 {
961 	struct aead_request *req = context;
962 	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
963 	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
964 	struct aead_edesc *edesc;
	int ecode = 0;
	bool has_bklog;
966 
967 	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
968 
969 	edesc = rctx->edesc;
970 
971 	if (err)
972 		ecode = caam_jr_strstatus(jrdev, err);
973 
974 	aead_unmap(jrdev, edesc, req);
975 
	/*
	 * Latch the backlog flag before freeing the descriptor; edesc
	 * must not be dereferenced after kfree().
	 */
	has_bklog = edesc->bklog;
	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		aead_request_complete(req, ecode);
	else
		crypto_finalize_aead_request(jrp->engine, req, ecode);
986 }
987 
988 static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
989 				void *context)
990 {
991 	struct skcipher_request *req = context;
992 	struct skcipher_edesc *edesc;
993 	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
994 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
995 	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
996 	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ecode = 0;
	bool has_bklog;
998 
999 	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1000 
1001 	edesc = rctx->edesc;
1002 	if (err)
1003 		ecode = caam_jr_strstatus(jrdev, err);
1004 
1005 	skcipher_unmap(jrdev, edesc, req);
1006 
1007 	/*
1008 	 * The crypto API expects us to set the IV (req->iv) to the last
1009 	 * ciphertext block (CBC mode) or last counter (CTR mode).
1010 	 * This is used e.g. by the CTS mode.
1011 	 */
1012 	if (ivsize && !ecode) {
1013 		memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
1014 		       ivsize);
1015 
1016 		print_hex_dump_debug("dstiv  @" __stringify(__LINE__)": ",
1017 				     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1018 				     ivsize, 1);
1019 	}
1020 
1021 	caam_dump_sg("dst    @" __stringify(__LINE__)": ",
1022 		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1023 		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
1024 
	/* Latch the backlog flag before the descriptor is freed */
	has_bklog = edesc->bklog;
	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		skcipher_request_complete(req, ecode);
	else
		crypto_finalize_skcipher_request(jrp->engine, req, ecode);
1035 }
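
/*
 * Usage note (illustrative, ignoring async completion): because req->iv
 * is updated above, a caller can CBC-encrypt a stream in chunks and the
 * chained calls match a single call over the whole stream, e.g.:
 *
 *	skcipher_request_set_crypt(req, sg1, sg1, len1, iv);
 *	crypto_skcipher_encrypt(req);	// iv now = last ciphertext block
 *	skcipher_request_set_crypt(req, sg2, sg2, len2, iv);
 *	crypto_skcipher_encrypt(req);
 */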
1036 
1037 /*
1038  * Fill in aead job descriptor
1039  */
1040 static void init_aead_job(struct aead_request *req,
1041 			  struct aead_edesc *edesc,
1042 			  bool all_contig, bool encrypt)
1043 {
1044 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1045 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1046 	int authsize = ctx->authsize;
1047 	u32 *desc = edesc->hw_desc;
1048 	u32 out_options, in_options;
1049 	dma_addr_t dst_dma, src_dma;
1050 	int len, sec4_sg_index = 0;
1051 	dma_addr_t ptr;
1052 	u32 *sh_desc;
1053 
1054 	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1055 	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1056 
1057 	len = desc_len(sh_desc);
1058 	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1059 
1060 	if (all_contig) {
1061 		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
1062 						    0;
1063 		in_options = 0;
1064 	} else {
1065 		src_dma = edesc->sec4_sg_dma;
1066 		sec4_sg_index += edesc->mapped_src_nents;
1067 		in_options = LDST_SGF;
1068 	}
1069 
1070 	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
1071 			  in_options);
1072 
1073 	dst_dma = src_dma;
1074 	out_options = in_options;
1075 
1076 	if (unlikely(req->src != req->dst)) {
1077 		if (!edesc->mapped_dst_nents) {
1078 			dst_dma = 0;
1079 			out_options = 0;
1080 		} else if (edesc->mapped_dst_nents == 1) {
1081 			dst_dma = sg_dma_address(req->dst);
1082 			out_options = 0;
1083 		} else {
1084 			dst_dma = edesc->sec4_sg_dma +
1085 				  sec4_sg_index *
1086 				  sizeof(struct sec4_sg_entry);
1087 			out_options = LDST_SGF;
1088 		}
1089 	}
1090 
1091 	if (encrypt)
1092 		append_seq_out_ptr(desc, dst_dma,
1093 				   req->assoclen + req->cryptlen + authsize,
1094 				   out_options);
1095 	else
1096 		append_seq_out_ptr(desc, dst_dma,
1097 				   req->assoclen + req->cryptlen - authsize,
1098 				   out_options);
1099 }
1100 
1101 static void init_gcm_job(struct aead_request *req,
1102 			 struct aead_edesc *edesc,
1103 			 bool all_contig, bool encrypt)
1104 {
1105 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1106 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1107 	unsigned int ivsize = crypto_aead_ivsize(aead);
1108 	u32 *desc = edesc->hw_desc;
1109 	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
1110 	unsigned int last;
1111 
1112 	init_aead_job(req, edesc, all_contig, encrypt);
1113 	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1114 
	/* BUG: This should not be specific to generic GCM. */
1116 	last = 0;
1117 	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1118 		last = FIFOLD_TYPE_LAST1;
1119 
1120 	/* Read GCM IV */
1121 	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1122 			 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
1123 	/* Append Salt */
1124 	if (!generic_gcm)
1125 		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1126 	/* Append IV */
1127 	append_data(desc, req->iv, ivsize);
1128 	/* End of blank commands */
1129 }
1130 
1131 static void init_chachapoly_job(struct aead_request *req,
1132 				struct aead_edesc *edesc, bool all_contig,
1133 				bool encrypt)
1134 {
1135 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1136 	unsigned int ivsize = crypto_aead_ivsize(aead);
1137 	unsigned int assoclen = req->assoclen;
1138 	u32 *desc = edesc->hw_desc;
1139 	u32 ctx_iv_off = 4;
1140 
1141 	init_aead_job(req, edesc, all_contig, encrypt);
1142 
1143 	if (ivsize != CHACHAPOLY_IV_SIZE) {
1144 		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
1145 		ctx_iv_off += 4;
1146 
1147 		/*
		 * The associated data already includes the IV, but we need
		 * to skip it when we authenticate or encrypt...
1150 		 */
1151 		assoclen -= ivsize;
1152 	}
1153 
1154 	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
1155 
1156 	/*
	 * For IPsec, load the IV at a further offset in the same register.
	 * For RFC7539, simply load the 12-byte nonce in a single operation.
1159 	 */
1160 	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
1161 			   LDST_SRCDST_BYTE_CONTEXT |
1162 			   ctx_iv_off << LDST_OFFSET_SHIFT);
1163 }
1164 
1165 static void init_authenc_job(struct aead_request *req,
1166 			     struct aead_edesc *edesc,
1167 			     bool all_contig, bool encrypt)
1168 {
1169 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1170 	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1171 						 struct caam_aead_alg, aead);
1172 	unsigned int ivsize = crypto_aead_ivsize(aead);
1173 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1174 	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1175 	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1176 			       OP_ALG_AAI_CTR_MOD128);
1177 	const bool is_rfc3686 = alg->caam.rfc3686;
1178 	u32 *desc = edesc->hw_desc;
1179 	u32 ivoffset = 0;
1180 
1181 	/*
1182 	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
1184 	 * CONTEXT1[255:128] = IV
1185 	 */
1186 	if (ctr_mode)
1187 		ivoffset = 16;
1188 
1189 	/*
1190 	 * RFC3686 specific:
1191 	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1192 	 */
1193 	if (is_rfc3686)
1194 		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1195 
1196 	init_aead_job(req, edesc, all_contig, encrypt);
1197 
1198 	/*
1199 	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1200 	 * having DPOVRD as destination.
1201 	 */
1202 	if (ctrlpriv->era < 3)
1203 		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1204 	else
1205 		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1206 
1207 	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1208 		append_load_as_imm(desc, req->iv, ivsize,
1209 				   LDST_CLASS_1_CCB |
1210 				   LDST_SRCDST_BYTE_CONTEXT |
1211 				   (ivoffset << LDST_OFFSET_SHIFT));
1212 }
1213 
1214 /*
1215  * Fill in skcipher job descriptor
1216  */
1217 static void init_skcipher_job(struct skcipher_request *req,
1218 			      struct skcipher_edesc *edesc,
1219 			      const bool encrypt)
1220 {
1221 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1222 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1223 	struct device *jrdev = ctx->jrdev;
1224 	int ivsize = crypto_skcipher_ivsize(skcipher);
1225 	u32 *desc = edesc->hw_desc;
1226 	u32 *sh_desc;
1227 	u32 in_options = 0, out_options = 0;
1228 	dma_addr_t src_dma, dst_dma, ptr;
1229 	int len, sec4_sg_index = 0;
1230 
1231 	print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
1232 			     DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	dev_dbg(jrdev, "asked=%d, cryptlen=%d\n",
1234 	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
1235 
1236 	caam_dump_sg("src    @" __stringify(__LINE__)": ",
1237 		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1238 		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
1239 
1240 	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1241 	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1242 
1243 	len = desc_len(sh_desc);
1244 	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1245 
1246 	if (ivsize || edesc->mapped_src_nents > 1) {
1247 		src_dma = edesc->sec4_sg_dma;
1248 		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
1249 		in_options = LDST_SGF;
1250 	} else {
1251 		src_dma = sg_dma_address(req->src);
1252 	}
1253 
1254 	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);
1255 
1256 	if (likely(req->src == req->dst)) {
1257 		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
1258 		out_options = in_options;
1259 	} else if (!ivsize && edesc->mapped_dst_nents == 1) {
1260 		dst_dma = sg_dma_address(req->dst);
1261 	} else {
1262 		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
1263 			  sizeof(struct sec4_sg_entry);
1264 		out_options = LDST_SGF;
1265 	}
1266 
1267 	append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
1268 }
1269 
1270 /*
1271  * allocate and map the aead extended descriptor
1272  */
1273 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1274 					   int desc_bytes, bool *all_contig_ptr,
1275 					   bool encrypt)
1276 {
1277 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1278 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1279 	struct device *jrdev = ctx->jrdev;
1280 	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1281 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1282 		       GFP_KERNEL : GFP_ATOMIC;
1283 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1284 	int src_len, dst_len = 0;
1285 	struct aead_edesc *edesc;
1286 	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1287 	unsigned int authsize = ctx->authsize;
1288 
1289 	if (unlikely(req->dst != req->src)) {
1290 		src_len = req->assoclen + req->cryptlen;
1291 		dst_len = src_len + (encrypt ? authsize : (-authsize));
1292 
1293 		src_nents = sg_nents_for_len(req->src, src_len);
1294 		if (unlikely(src_nents < 0)) {
1295 			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1296 				src_len);
1297 			return ERR_PTR(src_nents);
1298 		}
1299 
1300 		dst_nents = sg_nents_for_len(req->dst, dst_len);
1301 		if (unlikely(dst_nents < 0)) {
1302 			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1303 				dst_len);
1304 			return ERR_PTR(dst_nents);
1305 		}
1306 	} else {
1307 		src_len = req->assoclen + req->cryptlen +
1308 			  (encrypt ? authsize : 0);
1309 
1310 		src_nents = sg_nents_for_len(req->src, src_len);
1311 		if (unlikely(src_nents < 0)) {
1312 			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1313 				src_len);
1314 			return ERR_PTR(src_nents);
1315 		}
1316 	}
1317 
1318 	if (likely(req->src == req->dst)) {
1319 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1320 					      DMA_BIDIRECTIONAL);
1321 		if (unlikely(!mapped_src_nents)) {
1322 			dev_err(jrdev, "unable to map source\n");
1323 			return ERR_PTR(-ENOMEM);
1324 		}
1325 	} else {
		/* Also cover the case of null (zero-length) input data */
1327 		if (src_nents) {
1328 			mapped_src_nents = dma_map_sg(jrdev, req->src,
1329 						      src_nents, DMA_TO_DEVICE);
1330 			if (unlikely(!mapped_src_nents)) {
1331 				dev_err(jrdev, "unable to map source\n");
1332 				return ERR_PTR(-ENOMEM);
1333 			}
1334 		} else {
1335 			mapped_src_nents = 0;
1336 		}
1337 
		/* Also cover the case of null (zero-length) output data */
1339 		if (dst_nents) {
1340 			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
1341 						      dst_nents,
1342 						      DMA_FROM_DEVICE);
1343 			if (unlikely(!mapped_dst_nents)) {
1344 				dev_err(jrdev, "unable to map destination\n");
1345 				dma_unmap_sg(jrdev, req->src, src_nents,
1346 					     DMA_TO_DEVICE);
1347 				return ERR_PTR(-ENOMEM);
1348 			}
1349 		} else {
1350 			mapped_dst_nents = 0;
1351 		}
1352 	}
1353 
1354 	/*
1355 	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1356 	 * the end of the table by allocating more S/G entries.
1357 	 */
1358 	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1359 	if (mapped_dst_nents > 1)
1360 		sec4_sg_len += pad_sg_nents(mapped_dst_nents);
1361 	else
1362 		sec4_sg_len = pad_sg_nents(sec4_sg_len);
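
	/*
	 * Worked example (hypothetical): mapped_src_nents = 3 and
	 * mapped_dst_nents = 2 give sec4_sg_len = 3 + pad_sg_nents(2) =
	 * 3 + 4 = 7 entries, keeping the 4-entry HW read-ahead in bounds.
	 */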
1363 
1364 	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1365 
1366 	/* allocate space for base edesc and hw desc commands, link tables */
1367 	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1368 			GFP_DMA | flags);
1369 	if (!edesc) {
1370 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1371 			   0, 0, 0);
1372 		return ERR_PTR(-ENOMEM);
1373 	}
1374 
1375 	edesc->src_nents = src_nents;
1376 	edesc->dst_nents = dst_nents;
1377 	edesc->mapped_src_nents = mapped_src_nents;
1378 	edesc->mapped_dst_nents = mapped_dst_nents;
1379 	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1380 			 desc_bytes;
1381 
1382 	rctx->edesc = edesc;
1383 
1384 	*all_contig_ptr = !(mapped_src_nents > 1);
1385 
1386 	sec4_sg_index = 0;
1387 	if (mapped_src_nents > 1) {
1388 		sg_to_sec4_sg_last(req->src, src_len,
1389 				   edesc->sec4_sg + sec4_sg_index, 0);
1390 		sec4_sg_index += mapped_src_nents;
1391 	}
1392 	if (mapped_dst_nents > 1) {
1393 		sg_to_sec4_sg_last(req->dst, dst_len,
1394 				   edesc->sec4_sg + sec4_sg_index, 0);
1395 	}
1396 
1397 	if (!sec4_sg_bytes)
1398 		return edesc;
1399 
1400 	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1401 					    sec4_sg_bytes, DMA_TO_DEVICE);
1402 	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1403 		dev_err(jrdev, "unable to map S/G table\n");
1404 		aead_unmap(jrdev, edesc, req);
1405 		kfree(edesc);
1406 		return ERR_PTR(-ENOMEM);
1407 	}
1408 
1409 	edesc->sec4_sg_bytes = sec4_sg_bytes;
1410 
1411 	return edesc;
1412 }
1413 
1414 static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
1415 {
1416 	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1417 	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1418 	struct aead_edesc *edesc = rctx->edesc;
1419 	u32 *desc = edesc->hw_desc;
1420 	int ret;
1421 
1422 	/*
	 * Only backlog requests are sent to the crypto engine, since the
	 * others can be handled by CAAM if it is free, especially since the
	 * job ring has up to 1024 entries (more than the 10 entries of the
	 * crypto engine).
1426 	 */
1427 	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1428 		ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
1429 							     req);
1430 	else
1431 		ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);
1432 
1433 	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1434 		aead_unmap(jrdev, edesc, req);
1435 		kfree(rctx->edesc);
1436 	}
1437 
1438 	return ret;
1439 }
1440 
1441 static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
1442 {
1443 	struct aead_edesc *edesc;
1444 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1445 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1446 	struct device *jrdev = ctx->jrdev;
1447 	bool all_contig;
1448 	u32 *desc;
1449 
1450 	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1451 				 encrypt);
1452 	if (IS_ERR(edesc))
1453 		return PTR_ERR(edesc);
1454 
1455 	desc = edesc->hw_desc;
1456 
1457 	init_chachapoly_job(req, edesc, all_contig, encrypt);
1458 	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1459 			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1460 			     1);
1461 
1462 	return aead_enqueue_req(jrdev, req);
1463 }
1464 
1465 static int chachapoly_encrypt(struct aead_request *req)
1466 {
1467 	return chachapoly_crypt(req, true);
1468 }
1469 
1470 static int chachapoly_decrypt(struct aead_request *req)
1471 {
1472 	return chachapoly_crypt(req, false);
1473 }
1474 
1475 static inline int aead_crypt(struct aead_request *req, bool encrypt)
1476 {
1477 	struct aead_edesc *edesc;
1478 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1479 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1480 	struct device *jrdev = ctx->jrdev;
1481 	bool all_contig;
1482 
1483 	/* allocate extended descriptor */
1484 	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1485 				 &all_contig, encrypt);
1486 	if (IS_ERR(edesc))
1487 		return PTR_ERR(edesc);
1488 
1489 	/* Create and submit job descriptor */
1490 	init_authenc_job(req, edesc, all_contig, encrypt);
1491 
1492 	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1493 			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1494 			     desc_bytes(edesc->hw_desc), 1);
1495 
1496 	return aead_enqueue_req(jrdev, req);
1497 }
1498 
1499 static int aead_encrypt(struct aead_request *req)
1500 {
1501 	return aead_crypt(req, true);
1502 }
1503 
1504 static int aead_decrypt(struct aead_request *req)
1505 {
1506 	return aead_crypt(req, false);
1507 }
1508 
1509 static int aead_do_one_req(struct crypto_engine *engine, void *areq)
1510 {
1511 	struct aead_request *req = aead_request_cast(areq);
1512 	struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
1513 	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1514 	u32 *desc = rctx->edesc->hw_desc;
1515 	int ret;
1516 
1517 	rctx->edesc->bklog = true;
1518 
1519 	ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);
1520 
1521 	if (ret != -EINPROGRESS) {
1522 		aead_unmap(ctx->jrdev, rctx->edesc, req);
1523 		kfree(rctx->edesc);
1524 	} else {
1525 		ret = 0;
1526 	}
1527 
1528 	return ret;
1529 }
1530 
1531 static inline int gcm_crypt(struct aead_request *req, bool encrypt)
1532 {
1533 	struct aead_edesc *edesc;
1534 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1535 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1536 	struct device *jrdev = ctx->jrdev;
1537 	bool all_contig;
1538 
1539 	/* allocate extended descriptor */
1540 	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
1541 				 encrypt);
1542 	if (IS_ERR(edesc))
1543 		return PTR_ERR(edesc);
1544 
1545 	/* Create and submit job descriptor */
1546 	init_gcm_job(req, edesc, all_contig, encrypt);
1547 
1548 	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1549 			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1550 			     desc_bytes(edesc->hw_desc), 1);
1551 
1552 	return aead_enqueue_req(jrdev, req);
1553 }
1554 
1555 static int gcm_encrypt(struct aead_request *req)
1556 {
1557 	return gcm_crypt(req, true);
1558 }
1559 
1560 static int gcm_decrypt(struct aead_request *req)
1561 {
1562 	return gcm_crypt(req, false);
1563 }
1564 
1565 static int ipsec_gcm_encrypt(struct aead_request *req)
1566 {
1567 	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
1568 }
1569 
1570 static int ipsec_gcm_decrypt(struct aead_request *req)
1571 {
1572 	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
1573 }
1574 
1575 /*
1576  * allocate and map the skcipher extended descriptor for skcipher
1577  */
1578 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1579 						   int desc_bytes)
1580 {
1581 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1582 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1583 	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1584 	struct device *jrdev = ctx->jrdev;
1585 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1586 		       GFP_KERNEL : GFP_ATOMIC;
1587 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1588 	struct skcipher_edesc *edesc;
1589 	dma_addr_t iv_dma = 0;
1590 	u8 *iv;
1591 	int ivsize = crypto_skcipher_ivsize(skcipher);
1592 	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1593 
1594 	src_nents = sg_nents_for_len(req->src, req->cryptlen);
1595 	if (unlikely(src_nents < 0)) {
1596 		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1597 			req->cryptlen);
1598 		return ERR_PTR(src_nents);
1599 	}
1600 
1601 	if (req->dst != req->src) {
1602 		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1603 		if (unlikely(dst_nents < 0)) {
1604 			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1605 				req->cryptlen);
1606 			return ERR_PTR(dst_nents);
1607 		}
1608 	}
1609 
1610 	if (likely(req->src == req->dst)) {
1611 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1612 					      DMA_BIDIRECTIONAL);
1613 		if (unlikely(!mapped_src_nents)) {
1614 			dev_err(jrdev, "unable to map source\n");
1615 			return ERR_PTR(-ENOMEM);
1616 		}
1617 	} else {
1618 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1619 					      DMA_TO_DEVICE);
1620 		if (unlikely(!mapped_src_nents)) {
1621 			dev_err(jrdev, "unable to map source\n");
1622 			return ERR_PTR(-ENOMEM);
1623 		}
1624 		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1625 					      DMA_FROM_DEVICE);
1626 		if (unlikely(!mapped_dst_nents)) {
1627 			dev_err(jrdev, "unable to map destination\n");
1628 			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1629 			return ERR_PTR(-ENOMEM);
1630 		}
1631 	}
1632 
1633 	if (!ivsize && mapped_src_nents == 1)
1634 		sec4_sg_ents = 0; // no need for an input hw s/g table
1635 	else
1636 		sec4_sg_ents = mapped_src_nents + !!ivsize;
1637 	dst_sg_idx = sec4_sg_ents;
1638 
1639 	/*
1640 	 * Input, output HW S/G tables: [IV, src][dst, IV]
1641 	 * IV entries point to the same buffer
1642 	 * If src == dst, S/G entries are reused (S/G tables overlap)
1643 	 *
1644 	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1645 	 * the end of the table by allocating more S/G entries. Logic:
1646 	 * if (output S/G)
1647 	 *      pad output S/G, if needed
1648 	 * else if (input S/G) ...
1649 	 *      pad input S/G, if needed
1650 	 */
1651 	if (ivsize || mapped_dst_nents > 1) {
1652 		if (req->src == req->dst)
1653 			sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
1654 		else
1655 			sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
1656 						     !!ivsize);
1657 	} else {
1658 		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
1659 	}
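
	/*
	 * Worked example (hypothetical): src == dst, ivsize = 16 and
	 * mapped_src_nents = 2 give the table [IV, src0, src1, IV_out];
	 * the input view is entries 0-2, the output view entries 1-3, and
	 * 1 + pad_sg_nents(3) = 5 entries are allocated so the 4-entry HW
	 * read-ahead stays inside the table.
	 */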
1660 
1661 	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1662 
1663 	/*
1664 	 * allocate space for base edesc and hw desc commands, link tables, IV
1665 	 */
1666 	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
1667 			GFP_DMA | flags);
1668 	if (!edesc) {
1669 		dev_err(jrdev, "could not allocate extended descriptor\n");
1670 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1671 			   0, 0, 0);
1672 		return ERR_PTR(-ENOMEM);
1673 	}
1674 
1675 	edesc->src_nents = src_nents;
1676 	edesc->dst_nents = dst_nents;
1677 	edesc->mapped_src_nents = mapped_src_nents;
1678 	edesc->mapped_dst_nents = mapped_dst_nents;
1679 	edesc->sec4_sg_bytes = sec4_sg_bytes;
1680 	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1681 						  desc_bytes);
1682 	rctx->edesc = edesc;
1683 
1684 	/* Make sure IV is located in a DMAable area */
1685 	if (ivsize) {
1686 		iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
1687 		memcpy(iv, req->iv, ivsize);
1688 
1689 		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
1690 		if (dma_mapping_error(jrdev, iv_dma)) {
1691 			dev_err(jrdev, "unable to map IV\n");
1692 			caam_unmap(jrdev, req->src, req->dst, src_nents,
1693 				   dst_nents, 0, 0, 0, 0);
1694 			kfree(edesc);
1695 			return ERR_PTR(-ENOMEM);
1696 		}
1697 
1698 		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1699 	}
1700 	if (dst_sg_idx)
1701 		sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
1702 			      !!ivsize, 0);
1703 
1704 	if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
1705 		sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
1706 			      dst_sg_idx, 0);
1707 
1708 	if (ivsize)
1709 		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
1710 				   mapped_dst_nents, iv_dma, ivsize, 0);
1711 
1712 	if (ivsize || mapped_dst_nents > 1)
1713 		sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
1714 				    mapped_dst_nents);
1715 
1716 	if (sec4_sg_bytes) {
1717 		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1718 						    sec4_sg_bytes,
1719 						    DMA_TO_DEVICE);
1720 		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1721 			dev_err(jrdev, "unable to map S/G table\n");
1722 			caam_unmap(jrdev, req->src, req->dst, src_nents,
1723 				   dst_nents, iv_dma, ivsize, 0, 0);
1724 			kfree(edesc);
1725 			return ERR_PTR(-ENOMEM);
1726 		}
1727 	}
1728 
1729 	edesc->iv_dma = iv_dma;
1730 
1731 	print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
1732 			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1733 			     sec4_sg_bytes, 1);
1734 
1735 	return edesc;
1736 }
1737 
1738 static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
1739 {
1740 	struct skcipher_request *req = skcipher_request_cast(areq);
1741 	struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
1742 	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1743 	u32 *desc = rctx->edesc->hw_desc;
1744 	int ret;
1745 
1746 	rctx->edesc->bklog = true;
1747 
1748 	ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);
1749 
1750 	if (ret != -EINPROGRESS) {
1751 		skcipher_unmap(ctx->jrdev, rctx->edesc, req);
1752 		kfree(rctx->edesc);
1753 	} else {
1754 		ret = 0;
1755 	}
1756 
1757 	return ret;
1758 }
1759 
1760 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
1761 {
1762 	struct skcipher_edesc *edesc;
1763 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1764 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1765 	struct device *jrdev = ctx->jrdev;
1766 	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1767 	u32 *desc;
1768 	int ret = 0;
1769 
1770 	if (!req->cryptlen)
1771 		return 0;
1772 
1773 	/* allocate extended descriptor */
1774 	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1775 	if (IS_ERR(edesc))
1776 		return PTR_ERR(edesc);
1777 
1778 	/* Create and submit job descriptor */
1779 	init_skcipher_job(req, edesc, encrypt);
1780 
1781 	print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
1782 			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1783 			     desc_bytes(edesc->hw_desc), 1);
1784 
1785 	desc = edesc->hw_desc;
1786 	/*
1787 	 * Only backlog requests are sent to crypto-engine, since the others
1788 	 * can be handled by CAAM directly if it is free: the JR has up to
1789 	 * 1024 entries (far more than the 10 entries of crypto-engine).
1790 	 */
1791 	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1792 		ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
1793 								 req);
1794 	else
1795 		ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
1796 
1797 	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1798 		skcipher_unmap(jrdev, edesc, req);
1799 		kfree(edesc);
1800 	}
1801 
1802 	return ret;
1803 }
1804 
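/*
 * A minimal usage sketch (not part of this file) showing how a kernel
 * caller would drive the skcipher path above through the generic API.
 * The example_* name is illustrative; the buffer must be DMA-able
 * (e.g. kmalloc'd), len must be a multiple of AES_BLOCK_SIZE, and
 * CRYPTO_TFM_REQ_MAY_BACKLOG routes the request through the
 * crypto-engine branch of skcipher_crypt().
 */
#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

static int example_cbc_aes_encrypt(u8 *buf, unsigned int len,
				   const u8 key[AES_KEYSIZE_128],
				   u8 iv[AES_BLOCK_SIZE])
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	/* Resolves to cbc-aes-caam when this driver wins on priority */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* skcipher_crypt() returns -EINPROGRESS; wait for completion */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}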
1805 static int skcipher_encrypt(struct skcipher_request *req)
1806 {
1807 	return skcipher_crypt(req, true);
1808 }
1809 
1810 static int skcipher_decrypt(struct skcipher_request *req)
1811 {
1812 	return skcipher_crypt(req, false);
1813 }
1814 
1815 static struct caam_skcipher_alg driver_algs[] = {
1816 	{
1817 		.skcipher = {
1818 			.base = {
1819 				.cra_name = "cbc(aes)",
1820 				.cra_driver_name = "cbc-aes-caam",
1821 				.cra_blocksize = AES_BLOCK_SIZE,
1822 			},
1823 			.setkey = aes_skcipher_setkey,
1824 			.encrypt = skcipher_encrypt,
1825 			.decrypt = skcipher_decrypt,
1826 			.min_keysize = AES_MIN_KEY_SIZE,
1827 			.max_keysize = AES_MAX_KEY_SIZE,
1828 			.ivsize = AES_BLOCK_SIZE,
1829 		},
1830 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1831 	},
1832 	{
1833 		.skcipher = {
1834 			.base = {
1835 				.cra_name = "cbc(des3_ede)",
1836 				.cra_driver_name = "cbc-3des-caam",
1837 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1838 			},
1839 			.setkey = des3_skcipher_setkey,
1840 			.encrypt = skcipher_encrypt,
1841 			.decrypt = skcipher_decrypt,
1842 			.min_keysize = DES3_EDE_KEY_SIZE,
1843 			.max_keysize = DES3_EDE_KEY_SIZE,
1844 			.ivsize = DES3_EDE_BLOCK_SIZE,
1845 		},
1846 		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1847 	},
1848 	{
1849 		.skcipher = {
1850 			.base = {
1851 				.cra_name = "cbc(des)",
1852 				.cra_driver_name = "cbc-des-caam",
1853 				.cra_blocksize = DES_BLOCK_SIZE,
1854 			},
1855 			.setkey = des_skcipher_setkey,
1856 			.encrypt = skcipher_encrypt,
1857 			.decrypt = skcipher_decrypt,
1858 			.min_keysize = DES_KEY_SIZE,
1859 			.max_keysize = DES_KEY_SIZE,
1860 			.ivsize = DES_BLOCK_SIZE,
1861 		},
1862 		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1863 	},
1864 	{
1865 		.skcipher = {
1866 			.base = {
1867 				.cra_name = "ctr(aes)",
1868 				.cra_driver_name = "ctr-aes-caam",
1869 				.cra_blocksize = 1,
1870 			},
1871 			.setkey = ctr_skcipher_setkey,
1872 			.encrypt = skcipher_encrypt,
1873 			.decrypt = skcipher_decrypt,
1874 			.min_keysize = AES_MIN_KEY_SIZE,
1875 			.max_keysize = AES_MAX_KEY_SIZE,
1876 			.ivsize = AES_BLOCK_SIZE,
1877 			.chunksize = AES_BLOCK_SIZE,
1878 		},
1879 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1880 					OP_ALG_AAI_CTR_MOD128,
1881 	},
1882 	{
1883 		.skcipher = {
1884 			.base = {
1885 				.cra_name = "rfc3686(ctr(aes))",
1886 				.cra_driver_name = "rfc3686-ctr-aes-caam",
1887 				.cra_blocksize = 1,
1888 			},
1889 			.setkey = rfc3686_skcipher_setkey,
1890 			.encrypt = skcipher_encrypt,
1891 			.decrypt = skcipher_decrypt,
1892 			.min_keysize = AES_MIN_KEY_SIZE +
1893 				       CTR_RFC3686_NONCE_SIZE,
1894 			.max_keysize = AES_MAX_KEY_SIZE +
1895 				       CTR_RFC3686_NONCE_SIZE,
1896 			.ivsize = CTR_RFC3686_IV_SIZE,
1897 			.chunksize = AES_BLOCK_SIZE,
1898 		},
1899 		.caam = {
1900 			.class1_alg_type = OP_ALG_ALGSEL_AES |
1901 					   OP_ALG_AAI_CTR_MOD128,
1902 			.rfc3686 = true,
1903 		},
1904 	},
1905 	{
1906 		.skcipher = {
1907 			.base = {
1908 				.cra_name = "xts(aes)",
1909 				.cra_driver_name = "xts-aes-caam",
1910 				.cra_blocksize = AES_BLOCK_SIZE,
1911 			},
1912 			.setkey = xts_skcipher_setkey,
1913 			.encrypt = skcipher_encrypt,
1914 			.decrypt = skcipher_decrypt,
1915 			.min_keysize = 2 * AES_MIN_KEY_SIZE,
1916 			.max_keysize = 2 * AES_MAX_KEY_SIZE,
1917 			.ivsize = AES_BLOCK_SIZE,
1918 		},
1919 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1920 	},
1921 	{
1922 		.skcipher = {
1923 			.base = {
1924 				.cra_name = "ecb(des)",
1925 				.cra_driver_name = "ecb-des-caam",
1926 				.cra_blocksize = DES_BLOCK_SIZE,
1927 			},
1928 			.setkey = des_skcipher_setkey,
1929 			.encrypt = skcipher_encrypt,
1930 			.decrypt = skcipher_decrypt,
1931 			.min_keysize = DES_KEY_SIZE,
1932 			.max_keysize = DES_KEY_SIZE,
1933 		},
1934 		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
1935 	},
1936 	{
1937 		.skcipher = {
1938 			.base = {
1939 				.cra_name = "ecb(aes)",
1940 				.cra_driver_name = "ecb-aes-caam",
1941 				.cra_blocksize = AES_BLOCK_SIZE,
1942 			},
1943 			.setkey = aes_skcipher_setkey,
1944 			.encrypt = skcipher_encrypt,
1945 			.decrypt = skcipher_decrypt,
1946 			.min_keysize = AES_MIN_KEY_SIZE,
1947 			.max_keysize = AES_MAX_KEY_SIZE,
1948 		},
1949 		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
1950 	},
1951 	{
1952 		.skcipher = {
1953 			.base = {
1954 				.cra_name = "ecb(des3_ede)",
1955 				.cra_driver_name = "ecb-des3-caam",
1956 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1957 			},
1958 			.setkey = des3_skcipher_setkey,
1959 			.encrypt = skcipher_encrypt,
1960 			.decrypt = skcipher_decrypt,
1961 			.min_keysize = DES3_EDE_KEY_SIZE,
1962 			.max_keysize = DES3_EDE_KEY_SIZE,
1963 		},
1964 		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
1965 	},
1966 	{
1967 		.skcipher = {
1968 			.base = {
1969 				.cra_name = "ecb(arc4)",
1970 				.cra_driver_name = "ecb-arc4-caam",
1971 				.cra_blocksize = ARC4_BLOCK_SIZE,
1972 			},
1973 			.setkey = arc4_skcipher_setkey,
1974 			.encrypt = skcipher_encrypt,
1975 			.decrypt = skcipher_decrypt,
1976 			.min_keysize = ARC4_MIN_KEY_SIZE,
1977 			.max_keysize = ARC4_MAX_KEY_SIZE,
1978 		},
1979 		.caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB,
1980 	},
1981 };
1982 
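/*
 * Each entry above is exposed under both its generic cra_name (e.g.
 * "cbc(aes)") and its driver-specific cra_driver_name (e.g.
 * "cbc-aes-caam").  With CAAM_CRA_PRIORITY set to 3000, these
 * implementations normally outrank the generic software ciphers when a
 * caller asks for the generic name.
 */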
1983 static struct caam_aead_alg driver_aeads[] = {
1984 	{
1985 		.aead = {
1986 			.base = {
1987 				.cra_name = "rfc4106(gcm(aes))",
1988 				.cra_driver_name = "rfc4106-gcm-aes-caam",
1989 				.cra_blocksize = 1,
1990 			},
1991 			.setkey = rfc4106_setkey,
1992 			.setauthsize = rfc4106_setauthsize,
1993 			.encrypt = ipsec_gcm_encrypt,
1994 			.decrypt = ipsec_gcm_decrypt,
1995 			.ivsize = GCM_RFC4106_IV_SIZE,
1996 			.maxauthsize = AES_BLOCK_SIZE,
1997 		},
1998 		.caam = {
1999 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2000 			.nodkp = true,
2001 		},
2002 	},
2003 	{
2004 		.aead = {
2005 			.base = {
2006 				.cra_name = "rfc4543(gcm(aes))",
2007 				.cra_driver_name = "rfc4543-gcm-aes-caam",
2008 				.cra_blocksize = 1,
2009 			},
2010 			.setkey = rfc4543_setkey,
2011 			.setauthsize = rfc4543_setauthsize,
2012 			.encrypt = ipsec_gcm_encrypt,
2013 			.decrypt = ipsec_gcm_decrypt,
2014 			.ivsize = GCM_RFC4543_IV_SIZE,
2015 			.maxauthsize = AES_BLOCK_SIZE,
2016 		},
2017 		.caam = {
2018 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2019 			.nodkp = true,
2020 		},
2021 	},
2022 	/* Galois Counter Mode */
2023 	{
2024 		.aead = {
2025 			.base = {
2026 				.cra_name = "gcm(aes)",
2027 				.cra_driver_name = "gcm-aes-caam",
2028 				.cra_blocksize = 1,
2029 			},
2030 			.setkey = gcm_setkey,
2031 			.setauthsize = gcm_setauthsize,
2032 			.encrypt = gcm_encrypt,
2033 			.decrypt = gcm_decrypt,
2034 			.ivsize = GCM_AES_IV_SIZE,
2035 			.maxauthsize = AES_BLOCK_SIZE,
2036 		},
2037 		.caam = {
2038 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2039 			.nodkp = true,
2040 		},
2041 	},
2042 	/* single-pass ipsec_esp descriptor */
2043 	{
2044 		.aead = {
2045 			.base = {
2046 				.cra_name = "authenc(hmac(md5),"
2047 					    "ecb(cipher_null))",
2048 				.cra_driver_name = "authenc-hmac-md5-"
2049 						   "ecb-cipher_null-caam",
2050 				.cra_blocksize = NULL_BLOCK_SIZE,
2051 			},
2052 			.setkey = aead_setkey,
2053 			.setauthsize = aead_setauthsize,
2054 			.encrypt = aead_encrypt,
2055 			.decrypt = aead_decrypt,
2056 			.ivsize = NULL_IV_SIZE,
2057 			.maxauthsize = MD5_DIGEST_SIZE,
2058 		},
2059 		.caam = {
2060 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2061 					   OP_ALG_AAI_HMAC_PRECOMP,
2062 		},
2063 	},
2064 	{
2065 		.aead = {
2066 			.base = {
2067 				.cra_name = "authenc(hmac(sha1),"
2068 					    "ecb(cipher_null))",
2069 				.cra_driver_name = "authenc-hmac-sha1-"
2070 						   "ecb-cipher_null-caam",
2071 				.cra_blocksize = NULL_BLOCK_SIZE,
2072 			},
2073 			.setkey = aead_setkey,
2074 			.setauthsize = aead_setauthsize,
2075 			.encrypt = aead_encrypt,
2076 			.decrypt = aead_decrypt,
2077 			.ivsize = NULL_IV_SIZE,
2078 			.maxauthsize = SHA1_DIGEST_SIZE,
2079 		},
2080 		.caam = {
2081 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2082 					   OP_ALG_AAI_HMAC_PRECOMP,
2083 		},
2084 	},
2085 	{
2086 		.aead = {
2087 			.base = {
2088 				.cra_name = "authenc(hmac(sha224),"
2089 					    "ecb(cipher_null))",
2090 				.cra_driver_name = "authenc-hmac-sha224-"
2091 						   "ecb-cipher_null-caam",
2092 				.cra_blocksize = NULL_BLOCK_SIZE,
2093 			},
2094 			.setkey = aead_setkey,
2095 			.setauthsize = aead_setauthsize,
2096 			.encrypt = aead_encrypt,
2097 			.decrypt = aead_decrypt,
2098 			.ivsize = NULL_IV_SIZE,
2099 			.maxauthsize = SHA224_DIGEST_SIZE,
2100 		},
2101 		.caam = {
2102 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2103 					   OP_ALG_AAI_HMAC_PRECOMP,
2104 		},
2105 	},
2106 	{
2107 		.aead = {
2108 			.base = {
2109 				.cra_name = "authenc(hmac(sha256),"
2110 					    "ecb(cipher_null))",
2111 				.cra_driver_name = "authenc-hmac-sha256-"
2112 						   "ecb-cipher_null-caam",
2113 				.cra_blocksize = NULL_BLOCK_SIZE,
2114 			},
2115 			.setkey = aead_setkey,
2116 			.setauthsize = aead_setauthsize,
2117 			.encrypt = aead_encrypt,
2118 			.decrypt = aead_decrypt,
2119 			.ivsize = NULL_IV_SIZE,
2120 			.maxauthsize = SHA256_DIGEST_SIZE,
2121 		},
2122 		.caam = {
2123 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2124 					   OP_ALG_AAI_HMAC_PRECOMP,
2125 		},
2126 	},
2127 	{
2128 		.aead = {
2129 			.base = {
2130 				.cra_name = "authenc(hmac(sha384),"
2131 					    "ecb(cipher_null))",
2132 				.cra_driver_name = "authenc-hmac-sha384-"
2133 						   "ecb-cipher_null-caam",
2134 				.cra_blocksize = NULL_BLOCK_SIZE,
2135 			},
2136 			.setkey = aead_setkey,
2137 			.setauthsize = aead_setauthsize,
2138 			.encrypt = aead_encrypt,
2139 			.decrypt = aead_decrypt,
2140 			.ivsize = NULL_IV_SIZE,
2141 			.maxauthsize = SHA384_DIGEST_SIZE,
2142 		},
2143 		.caam = {
2144 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2145 					   OP_ALG_AAI_HMAC_PRECOMP,
2146 		},
2147 	},
2148 	{
2149 		.aead = {
2150 			.base = {
2151 				.cra_name = "authenc(hmac(sha512),"
2152 					    "ecb(cipher_null))",
2153 				.cra_driver_name = "authenc-hmac-sha512-"
2154 						   "ecb-cipher_null-caam",
2155 				.cra_blocksize = NULL_BLOCK_SIZE,
2156 			},
2157 			.setkey = aead_setkey,
2158 			.setauthsize = aead_setauthsize,
2159 			.encrypt = aead_encrypt,
2160 			.decrypt = aead_decrypt,
2161 			.ivsize = NULL_IV_SIZE,
2162 			.maxauthsize = SHA512_DIGEST_SIZE,
2163 		},
2164 		.caam = {
2165 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2166 					   OP_ALG_AAI_HMAC_PRECOMP,
2167 		},
2168 	},
2169 	{
2170 		.aead = {
2171 			.base = {
2172 				.cra_name = "authenc(hmac(md5),cbc(aes))",
2173 				.cra_driver_name = "authenc-hmac-md5-"
2174 						   "cbc-aes-caam",
2175 				.cra_blocksize = AES_BLOCK_SIZE,
2176 			},
2177 			.setkey = aead_setkey,
2178 			.setauthsize = aead_setauthsize,
2179 			.encrypt = aead_encrypt,
2180 			.decrypt = aead_decrypt,
2181 			.ivsize = AES_BLOCK_SIZE,
2182 			.maxauthsize = MD5_DIGEST_SIZE,
2183 		},
2184 		.caam = {
2185 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2186 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2187 					   OP_ALG_AAI_HMAC_PRECOMP,
2188 		},
2189 	},
2190 	{
2191 		.aead = {
2192 			.base = {
2193 				.cra_name = "echainiv(authenc(hmac(md5),"
2194 					    "cbc(aes)))",
2195 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2196 						   "cbc-aes-caam",
2197 				.cra_blocksize = AES_BLOCK_SIZE,
2198 			},
2199 			.setkey = aead_setkey,
2200 			.setauthsize = aead_setauthsize,
2201 			.encrypt = aead_encrypt,
2202 			.decrypt = aead_decrypt,
2203 			.ivsize = AES_BLOCK_SIZE,
2204 			.maxauthsize = MD5_DIGEST_SIZE,
2205 		},
2206 		.caam = {
2207 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2208 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2209 					   OP_ALG_AAI_HMAC_PRECOMP,
2210 			.geniv = true,
2211 		},
2212 	},
2213 	{
2214 		.aead = {
2215 			.base = {
2216 				.cra_name = "authenc(hmac(sha1),cbc(aes))",
2217 				.cra_driver_name = "authenc-hmac-sha1-"
2218 						   "cbc-aes-caam",
2219 				.cra_blocksize = AES_BLOCK_SIZE,
2220 			},
2221 			.setkey = aead_setkey,
2222 			.setauthsize = aead_setauthsize,
2223 			.encrypt = aead_encrypt,
2224 			.decrypt = aead_decrypt,
2225 			.ivsize = AES_BLOCK_SIZE,
2226 			.maxauthsize = SHA1_DIGEST_SIZE,
2227 		},
2228 		.caam = {
2229 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2230 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2231 					   OP_ALG_AAI_HMAC_PRECOMP,
2232 		},
2233 	},
2234 	{
2235 		.aead = {
2236 			.base = {
2237 				.cra_name = "echainiv(authenc(hmac(sha1),"
2238 					    "cbc(aes)))",
2239 				.cra_driver_name = "echainiv-authenc-"
2240 						   "hmac-sha1-cbc-aes-caam",
2241 				.cra_blocksize = AES_BLOCK_SIZE,
2242 			},
2243 			.setkey = aead_setkey,
2244 			.setauthsize = aead_setauthsize,
2245 			.encrypt = aead_encrypt,
2246 			.decrypt = aead_decrypt,
2247 			.ivsize = AES_BLOCK_SIZE,
2248 			.maxauthsize = SHA1_DIGEST_SIZE,
2249 		},
2250 		.caam = {
2251 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2252 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2253 					   OP_ALG_AAI_HMAC_PRECOMP,
2254 			.geniv = true,
2255 		},
2256 	},
2257 	{
2258 		.aead = {
2259 			.base = {
2260 				.cra_name = "authenc(hmac(sha224),cbc(aes))",
2261 				.cra_driver_name = "authenc-hmac-sha224-"
2262 						   "cbc-aes-caam",
2263 				.cra_blocksize = AES_BLOCK_SIZE,
2264 			},
2265 			.setkey = aead_setkey,
2266 			.setauthsize = aead_setauthsize,
2267 			.encrypt = aead_encrypt,
2268 			.decrypt = aead_decrypt,
2269 			.ivsize = AES_BLOCK_SIZE,
2270 			.maxauthsize = SHA224_DIGEST_SIZE,
2271 		},
2272 		.caam = {
2273 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2274 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2275 					   OP_ALG_AAI_HMAC_PRECOMP,
2276 		},
2277 	},
2278 	{
2279 		.aead = {
2280 			.base = {
2281 				.cra_name = "echainiv(authenc(hmac(sha224),"
2282 					    "cbc(aes)))",
2283 				.cra_driver_name = "echainiv-authenc-"
2284 						   "hmac-sha224-cbc-aes-caam",
2285 				.cra_blocksize = AES_BLOCK_SIZE,
2286 			},
2287 			.setkey = aead_setkey,
2288 			.setauthsize = aead_setauthsize,
2289 			.encrypt = aead_encrypt,
2290 			.decrypt = aead_decrypt,
2291 			.ivsize = AES_BLOCK_SIZE,
2292 			.maxauthsize = SHA224_DIGEST_SIZE,
2293 		},
2294 		.caam = {
2295 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2296 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2297 					   OP_ALG_AAI_HMAC_PRECOMP,
2298 			.geniv = true,
2299 		},
2300 	},
2301 	{
2302 		.aead = {
2303 			.base = {
2304 				.cra_name = "authenc(hmac(sha256),cbc(aes))",
2305 				.cra_driver_name = "authenc-hmac-sha256-"
2306 						   "cbc-aes-caam",
2307 				.cra_blocksize = AES_BLOCK_SIZE,
2308 			},
2309 			.setkey = aead_setkey,
2310 			.setauthsize = aead_setauthsize,
2311 			.encrypt = aead_encrypt,
2312 			.decrypt = aead_decrypt,
2313 			.ivsize = AES_BLOCK_SIZE,
2314 			.maxauthsize = SHA256_DIGEST_SIZE,
2315 		},
2316 		.caam = {
2317 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2318 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2319 					   OP_ALG_AAI_HMAC_PRECOMP,
2320 		},
2321 	},
2322 	{
2323 		.aead = {
2324 			.base = {
2325 				.cra_name = "echainiv(authenc(hmac(sha256),"
2326 					    "cbc(aes)))",
2327 				.cra_driver_name = "echainiv-authenc-"
2328 						   "hmac-sha256-cbc-aes-caam",
2329 				.cra_blocksize = AES_BLOCK_SIZE,
2330 			},
2331 			.setkey = aead_setkey,
2332 			.setauthsize = aead_setauthsize,
2333 			.encrypt = aead_encrypt,
2334 			.decrypt = aead_decrypt,
2335 			.ivsize = AES_BLOCK_SIZE,
2336 			.maxauthsize = SHA256_DIGEST_SIZE,
2337 		},
2338 		.caam = {
2339 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2340 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2341 					   OP_ALG_AAI_HMAC_PRECOMP,
2342 			.geniv = true,
2343 		},
2344 	},
2345 	{
2346 		.aead = {
2347 			.base = {
2348 				.cra_name = "authenc(hmac(sha384),cbc(aes))",
2349 				.cra_driver_name = "authenc-hmac-sha384-"
2350 						   "cbc-aes-caam",
2351 				.cra_blocksize = AES_BLOCK_SIZE,
2352 			},
2353 			.setkey = aead_setkey,
2354 			.setauthsize = aead_setauthsize,
2355 			.encrypt = aead_encrypt,
2356 			.decrypt = aead_decrypt,
2357 			.ivsize = AES_BLOCK_SIZE,
2358 			.maxauthsize = SHA384_DIGEST_SIZE,
2359 		},
2360 		.caam = {
2361 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2362 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2363 					   OP_ALG_AAI_HMAC_PRECOMP,
2364 		},
2365 	},
2366 	{
2367 		.aead = {
2368 			.base = {
2369 				.cra_name = "echainiv(authenc(hmac(sha384),"
2370 					    "cbc(aes)))",
2371 				.cra_driver_name = "echainiv-authenc-"
2372 						   "hmac-sha384-cbc-aes-caam",
2373 				.cra_blocksize = AES_BLOCK_SIZE,
2374 			},
2375 			.setkey = aead_setkey,
2376 			.setauthsize = aead_setauthsize,
2377 			.encrypt = aead_encrypt,
2378 			.decrypt = aead_decrypt,
2379 			.ivsize = AES_BLOCK_SIZE,
2380 			.maxauthsize = SHA384_DIGEST_SIZE,
2381 		},
2382 		.caam = {
2383 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2384 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2385 					   OP_ALG_AAI_HMAC_PRECOMP,
2386 			.geniv = true,
2387 		},
2388 	},
2389 	{
2390 		.aead = {
2391 			.base = {
2392 				.cra_name = "authenc(hmac(sha512),cbc(aes))",
2393 				.cra_driver_name = "authenc-hmac-sha512-"
2394 						   "cbc-aes-caam",
2395 				.cra_blocksize = AES_BLOCK_SIZE,
2396 			},
2397 			.setkey = aead_setkey,
2398 			.setauthsize = aead_setauthsize,
2399 			.encrypt = aead_encrypt,
2400 			.decrypt = aead_decrypt,
2401 			.ivsize = AES_BLOCK_SIZE,
2402 			.maxauthsize = SHA512_DIGEST_SIZE,
2403 		},
2404 		.caam = {
2405 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2406 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2407 					   OP_ALG_AAI_HMAC_PRECOMP,
2408 		},
2409 	},
2410 	{
2411 		.aead = {
2412 			.base = {
2413 				.cra_name = "echainiv(authenc(hmac(sha512),"
2414 					    "cbc(aes)))",
2415 				.cra_driver_name = "echainiv-authenc-"
2416 						   "hmac-sha512-cbc-aes-caam",
2417 				.cra_blocksize = AES_BLOCK_SIZE,
2418 			},
2419 			.setkey = aead_setkey,
2420 			.setauthsize = aead_setauthsize,
2421 			.encrypt = aead_encrypt,
2422 			.decrypt = aead_decrypt,
2423 			.ivsize = AES_BLOCK_SIZE,
2424 			.maxauthsize = SHA512_DIGEST_SIZE,
2425 		},
2426 		.caam = {
2427 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2428 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2429 					   OP_ALG_AAI_HMAC_PRECOMP,
2430 			.geniv = true,
2431 		},
2432 	},
2433 	{
2434 		.aead = {
2435 			.base = {
2436 				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2437 				.cra_driver_name = "authenc-hmac-md5-"
2438 						   "cbc-des3_ede-caam",
2439 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2440 			},
2441 			.setkey = des3_aead_setkey,
2442 			.setauthsize = aead_setauthsize,
2443 			.encrypt = aead_encrypt,
2444 			.decrypt = aead_decrypt,
2445 			.ivsize = DES3_EDE_BLOCK_SIZE,
2446 			.maxauthsize = MD5_DIGEST_SIZE,
2447 		},
2448 		.caam = {
2449 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2450 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2451 					   OP_ALG_AAI_HMAC_PRECOMP,
2452 		}
2453 	},
2454 	{
2455 		.aead = {
2456 			.base = {
2457 				.cra_name = "echainiv(authenc(hmac(md5),"
2458 					    "cbc(des3_ede)))",
2459 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2460 						   "cbc-des3_ede-caam",
2461 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2462 			},
2463 			.setkey = des3_aead_setkey,
2464 			.setauthsize = aead_setauthsize,
2465 			.encrypt = aead_encrypt,
2466 			.decrypt = aead_decrypt,
2467 			.ivsize = DES3_EDE_BLOCK_SIZE,
2468 			.maxauthsize = MD5_DIGEST_SIZE,
2469 		},
2470 		.caam = {
2471 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2472 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2473 					   OP_ALG_AAI_HMAC_PRECOMP,
2474 			.geniv = true,
2475 		}
2476 	},
2477 	{
2478 		.aead = {
2479 			.base = {
2480 				.cra_name = "authenc(hmac(sha1),"
2481 					    "cbc(des3_ede))",
2482 				.cra_driver_name = "authenc-hmac-sha1-"
2483 						   "cbc-des3_ede-caam",
2484 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2485 			},
2486 			.setkey = des3_aead_setkey,
2487 			.setauthsize = aead_setauthsize,
2488 			.encrypt = aead_encrypt,
2489 			.decrypt = aead_decrypt,
2490 			.ivsize = DES3_EDE_BLOCK_SIZE,
2491 			.maxauthsize = SHA1_DIGEST_SIZE,
2492 		},
2493 		.caam = {
2494 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2495 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2496 					   OP_ALG_AAI_HMAC_PRECOMP,
2497 		},
2498 	},
2499 	{
2500 		.aead = {
2501 			.base = {
2502 				.cra_name = "echainiv(authenc(hmac(sha1),"
2503 					    "cbc(des3_ede)))",
2504 				.cra_driver_name = "echainiv-authenc-"
2505 						   "hmac-sha1-"
2506 						   "cbc-des3_ede-caam",
2507 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2508 			},
2509 			.setkey = des3_aead_setkey,
2510 			.setauthsize = aead_setauthsize,
2511 			.encrypt = aead_encrypt,
2512 			.decrypt = aead_decrypt,
2513 			.ivsize = DES3_EDE_BLOCK_SIZE,
2514 			.maxauthsize = SHA1_DIGEST_SIZE,
2515 		},
2516 		.caam = {
2517 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2518 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2519 					   OP_ALG_AAI_HMAC_PRECOMP,
2520 			.geniv = true,
2521 		},
2522 	},
2523 	{
2524 		.aead = {
2525 			.base = {
2526 				.cra_name = "authenc(hmac(sha224),"
2527 					    "cbc(des3_ede))",
2528 				.cra_driver_name = "authenc-hmac-sha224-"
2529 						   "cbc-des3_ede-caam",
2530 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2531 			},
2532 			.setkey = des3_aead_setkey,
2533 			.setauthsize = aead_setauthsize,
2534 			.encrypt = aead_encrypt,
2535 			.decrypt = aead_decrypt,
2536 			.ivsize = DES3_EDE_BLOCK_SIZE,
2537 			.maxauthsize = SHA224_DIGEST_SIZE,
2538 		},
2539 		.caam = {
2540 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2541 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2542 					   OP_ALG_AAI_HMAC_PRECOMP,
2543 		},
2544 	},
2545 	{
2546 		.aead = {
2547 			.base = {
2548 				.cra_name = "echainiv(authenc(hmac(sha224),"
2549 					    "cbc(des3_ede)))",
2550 				.cra_driver_name = "echainiv-authenc-"
2551 						   "hmac-sha224-"
2552 						   "cbc-des3_ede-caam",
2553 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2554 			},
2555 			.setkey = des3_aead_setkey,
2556 			.setauthsize = aead_setauthsize,
2557 			.encrypt = aead_encrypt,
2558 			.decrypt = aead_decrypt,
2559 			.ivsize = DES3_EDE_BLOCK_SIZE,
2560 			.maxauthsize = SHA224_DIGEST_SIZE,
2561 		},
2562 		.caam = {
2563 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2564 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2565 					   OP_ALG_AAI_HMAC_PRECOMP,
2566 			.geniv = true,
2567 		},
2568 	},
2569 	{
2570 		.aead = {
2571 			.base = {
2572 				.cra_name = "authenc(hmac(sha256),"
2573 					    "cbc(des3_ede))",
2574 				.cra_driver_name = "authenc-hmac-sha256-"
2575 						   "cbc-des3_ede-caam",
2576 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2577 			},
2578 			.setkey = des3_aead_setkey,
2579 			.setauthsize = aead_setauthsize,
2580 			.encrypt = aead_encrypt,
2581 			.decrypt = aead_decrypt,
2582 			.ivsize = DES3_EDE_BLOCK_SIZE,
2583 			.maxauthsize = SHA256_DIGEST_SIZE,
2584 		},
2585 		.caam = {
2586 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2587 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2588 					   OP_ALG_AAI_HMAC_PRECOMP,
2589 		},
2590 	},
2591 	{
2592 		.aead = {
2593 			.base = {
2594 				.cra_name = "echainiv(authenc(hmac(sha256),"
2595 					    "cbc(des3_ede)))",
2596 				.cra_driver_name = "echainiv-authenc-"
2597 						   "hmac-sha256-"
2598 						   "cbc-des3_ede-caam",
2599 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2600 			},
2601 			.setkey = des3_aead_setkey,
2602 			.setauthsize = aead_setauthsize,
2603 			.encrypt = aead_encrypt,
2604 			.decrypt = aead_decrypt,
2605 			.ivsize = DES3_EDE_BLOCK_SIZE,
2606 			.maxauthsize = SHA256_DIGEST_SIZE,
2607 		},
2608 		.caam = {
2609 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2610 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2611 					   OP_ALG_AAI_HMAC_PRECOMP,
2612 			.geniv = true,
2613 		},
2614 	},
2615 	{
2616 		.aead = {
2617 			.base = {
2618 				.cra_name = "authenc(hmac(sha384),"
2619 					    "cbc(des3_ede))",
2620 				.cra_driver_name = "authenc-hmac-sha384-"
2621 						   "cbc-des3_ede-caam",
2622 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2623 			},
2624 			.setkey = des3_aead_setkey,
2625 			.setauthsize = aead_setauthsize,
2626 			.encrypt = aead_encrypt,
2627 			.decrypt = aead_decrypt,
2628 			.ivsize = DES3_EDE_BLOCK_SIZE,
2629 			.maxauthsize = SHA384_DIGEST_SIZE,
2630 		},
2631 		.caam = {
2632 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2633 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2634 					   OP_ALG_AAI_HMAC_PRECOMP,
2635 		},
2636 	},
2637 	{
2638 		.aead = {
2639 			.base = {
2640 				.cra_name = "echainiv(authenc(hmac(sha384),"
2641 					    "cbc(des3_ede)))",
2642 				.cra_driver_name = "echainiv-authenc-"
2643 						   "hmac-sha384-"
2644 						   "cbc-des3_ede-caam",
2645 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2646 			},
2647 			.setkey = des3_aead_setkey,
2648 			.setauthsize = aead_setauthsize,
2649 			.encrypt = aead_encrypt,
2650 			.decrypt = aead_decrypt,
2651 			.ivsize = DES3_EDE_BLOCK_SIZE,
2652 			.maxauthsize = SHA384_DIGEST_SIZE,
2653 		},
2654 		.caam = {
2655 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2656 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2657 					   OP_ALG_AAI_HMAC_PRECOMP,
2658 			.geniv = true,
2659 		},
2660 	},
2661 	{
2662 		.aead = {
2663 			.base = {
2664 				.cra_name = "authenc(hmac(sha512),"
2665 					    "cbc(des3_ede))",
2666 				.cra_driver_name = "authenc-hmac-sha512-"
2667 						   "cbc-des3_ede-caam",
2668 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2669 			},
2670 			.setkey = des3_aead_setkey,
2671 			.setauthsize = aead_setauthsize,
2672 			.encrypt = aead_encrypt,
2673 			.decrypt = aead_decrypt,
2674 			.ivsize = DES3_EDE_BLOCK_SIZE,
2675 			.maxauthsize = SHA512_DIGEST_SIZE,
2676 		},
2677 		.caam = {
2678 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2679 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2680 					   OP_ALG_AAI_HMAC_PRECOMP,
2681 		},
2682 	},
2683 	{
2684 		.aead = {
2685 			.base = {
2686 				.cra_name = "echainiv(authenc(hmac(sha512),"
2687 					    "cbc(des3_ede)))",
2688 				.cra_driver_name = "echainiv-authenc-"
2689 						   "hmac-sha512-"
2690 						   "cbc-des3_ede-caam",
2691 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2692 			},
2693 			.setkey = des3_aead_setkey,
2694 			.setauthsize = aead_setauthsize,
2695 			.encrypt = aead_encrypt,
2696 			.decrypt = aead_decrypt,
2697 			.ivsize = DES3_EDE_BLOCK_SIZE,
2698 			.maxauthsize = SHA512_DIGEST_SIZE,
2699 		},
2700 		.caam = {
2701 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2702 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2703 					   OP_ALG_AAI_HMAC_PRECOMP,
2704 			.geniv = true,
2705 		},
2706 	},
2707 	{
2708 		.aead = {
2709 			.base = {
2710 				.cra_name = "authenc(hmac(md5),cbc(des))",
2711 				.cra_driver_name = "authenc-hmac-md5-"
2712 						   "cbc-des-caam",
2713 				.cra_blocksize = DES_BLOCK_SIZE,
2714 			},
2715 			.setkey = aead_setkey,
2716 			.setauthsize = aead_setauthsize,
2717 			.encrypt = aead_encrypt,
2718 			.decrypt = aead_decrypt,
2719 			.ivsize = DES_BLOCK_SIZE,
2720 			.maxauthsize = MD5_DIGEST_SIZE,
2721 		},
2722 		.caam = {
2723 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2724 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2725 					   OP_ALG_AAI_HMAC_PRECOMP,
2726 		},
2727 	},
2728 	{
2729 		.aead = {
2730 			.base = {
2731 				.cra_name = "echainiv(authenc(hmac(md5),"
2732 					    "cbc(des)))",
2733 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2734 						   "cbc-des-caam",
2735 				.cra_blocksize = DES_BLOCK_SIZE,
2736 			},
2737 			.setkey = aead_setkey,
2738 			.setauthsize = aead_setauthsize,
2739 			.encrypt = aead_encrypt,
2740 			.decrypt = aead_decrypt,
2741 			.ivsize = DES_BLOCK_SIZE,
2742 			.maxauthsize = MD5_DIGEST_SIZE,
2743 		},
2744 		.caam = {
2745 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2746 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2747 					   OP_ALG_AAI_HMAC_PRECOMP,
2748 			.geniv = true,
2749 		},
2750 	},
2751 	{
2752 		.aead = {
2753 			.base = {
2754 				.cra_name = "authenc(hmac(sha1),cbc(des))",
2755 				.cra_driver_name = "authenc-hmac-sha1-"
2756 						   "cbc-des-caam",
2757 				.cra_blocksize = DES_BLOCK_SIZE,
2758 			},
2759 			.setkey = aead_setkey,
2760 			.setauthsize = aead_setauthsize,
2761 			.encrypt = aead_encrypt,
2762 			.decrypt = aead_decrypt,
2763 			.ivsize = DES_BLOCK_SIZE,
2764 			.maxauthsize = SHA1_DIGEST_SIZE,
2765 		},
2766 		.caam = {
2767 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2768 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2769 					   OP_ALG_AAI_HMAC_PRECOMP,
2770 		},
2771 	},
2772 	{
2773 		.aead = {
2774 			.base = {
2775 				.cra_name = "echainiv(authenc(hmac(sha1),"
2776 					    "cbc(des)))",
2777 				.cra_driver_name = "echainiv-authenc-"
2778 						   "hmac-sha1-cbc-des-caam",
2779 				.cra_blocksize = DES_BLOCK_SIZE,
2780 			},
2781 			.setkey = aead_setkey,
2782 			.setauthsize = aead_setauthsize,
2783 			.encrypt = aead_encrypt,
2784 			.decrypt = aead_decrypt,
2785 			.ivsize = DES_BLOCK_SIZE,
2786 			.maxauthsize = SHA1_DIGEST_SIZE,
2787 		},
2788 		.caam = {
2789 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2790 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2791 					   OP_ALG_AAI_HMAC_PRECOMP,
2792 			.geniv = true,
2793 		},
2794 	},
2795 	{
2796 		.aead = {
2797 			.base = {
2798 				.cra_name = "authenc(hmac(sha224),cbc(des))",
2799 				.cra_driver_name = "authenc-hmac-sha224-"
2800 						   "cbc-des-caam",
2801 				.cra_blocksize = DES_BLOCK_SIZE,
2802 			},
2803 			.setkey = aead_setkey,
2804 			.setauthsize = aead_setauthsize,
2805 			.encrypt = aead_encrypt,
2806 			.decrypt = aead_decrypt,
2807 			.ivsize = DES_BLOCK_SIZE,
2808 			.maxauthsize = SHA224_DIGEST_SIZE,
2809 		},
2810 		.caam = {
2811 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2812 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2813 					   OP_ALG_AAI_HMAC_PRECOMP,
2814 		},
2815 	},
2816 	{
2817 		.aead = {
2818 			.base = {
2819 				.cra_name = "echainiv(authenc(hmac(sha224),"
2820 					    "cbc(des)))",
2821 				.cra_driver_name = "echainiv-authenc-"
2822 						   "hmac-sha224-cbc-des-caam",
2823 				.cra_blocksize = DES_BLOCK_SIZE,
2824 			},
2825 			.setkey = aead_setkey,
2826 			.setauthsize = aead_setauthsize,
2827 			.encrypt = aead_encrypt,
2828 			.decrypt = aead_decrypt,
2829 			.ivsize = DES_BLOCK_SIZE,
2830 			.maxauthsize = SHA224_DIGEST_SIZE,
2831 		},
2832 		.caam = {
2833 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2834 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2835 					   OP_ALG_AAI_HMAC_PRECOMP,
2836 			.geniv = true,
2837 		},
2838 	},
2839 	{
2840 		.aead = {
2841 			.base = {
2842 				.cra_name = "authenc(hmac(sha256),cbc(des))",
2843 				.cra_driver_name = "authenc-hmac-sha256-"
2844 						   "cbc-des-caam",
2845 				.cra_blocksize = DES_BLOCK_SIZE,
2846 			},
2847 			.setkey = aead_setkey,
2848 			.setauthsize = aead_setauthsize,
2849 			.encrypt = aead_encrypt,
2850 			.decrypt = aead_decrypt,
2851 			.ivsize = DES_BLOCK_SIZE,
2852 			.maxauthsize = SHA256_DIGEST_SIZE,
2853 		},
2854 		.caam = {
2855 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2856 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2857 					   OP_ALG_AAI_HMAC_PRECOMP,
2858 		},
2859 	},
2860 	{
2861 		.aead = {
2862 			.base = {
2863 				.cra_name = "echainiv(authenc(hmac(sha256),"
2864 					    "cbc(des)))",
2865 				.cra_driver_name = "echainiv-authenc-"
2866 						   "hmac-sha256-cbc-des-caam",
2867 				.cra_blocksize = DES_BLOCK_SIZE,
2868 			},
2869 			.setkey = aead_setkey,
2870 			.setauthsize = aead_setauthsize,
2871 			.encrypt = aead_encrypt,
2872 			.decrypt = aead_decrypt,
2873 			.ivsize = DES_BLOCK_SIZE,
2874 			.maxauthsize = SHA256_DIGEST_SIZE,
2875 		},
2876 		.caam = {
2877 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2878 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2879 					   OP_ALG_AAI_HMAC_PRECOMP,
2880 			.geniv = true,
2881 		},
2882 	},
2883 	{
2884 		.aead = {
2885 			.base = {
2886 				.cra_name = "authenc(hmac(sha384),cbc(des))",
2887 				.cra_driver_name = "authenc-hmac-sha384-"
2888 						   "cbc-des-caam",
2889 				.cra_blocksize = DES_BLOCK_SIZE,
2890 			},
2891 			.setkey = aead_setkey,
2892 			.setauthsize = aead_setauthsize,
2893 			.encrypt = aead_encrypt,
2894 			.decrypt = aead_decrypt,
2895 			.ivsize = DES_BLOCK_SIZE,
2896 			.maxauthsize = SHA384_DIGEST_SIZE,
2897 		},
2898 		.caam = {
2899 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2900 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2901 					   OP_ALG_AAI_HMAC_PRECOMP,
2902 		},
2903 	},
2904 	{
2905 		.aead = {
2906 			.base = {
2907 				.cra_name = "echainiv(authenc(hmac(sha384),"
2908 					    "cbc(des)))",
2909 				.cra_driver_name = "echainiv-authenc-"
2910 						   "hmac-sha384-cbc-des-caam",
2911 				.cra_blocksize = DES_BLOCK_SIZE,
2912 			},
2913 			.setkey = aead_setkey,
2914 			.setauthsize = aead_setauthsize,
2915 			.encrypt = aead_encrypt,
2916 			.decrypt = aead_decrypt,
2917 			.ivsize = DES_BLOCK_SIZE,
2918 			.maxauthsize = SHA384_DIGEST_SIZE,
2919 		},
2920 		.caam = {
2921 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2922 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2923 					   OP_ALG_AAI_HMAC_PRECOMP,
2924 			.geniv = true,
2925 		},
2926 	},
2927 	{
2928 		.aead = {
2929 			.base = {
2930 				.cra_name = "authenc(hmac(sha512),cbc(des))",
2931 				.cra_driver_name = "authenc-hmac-sha512-"
2932 						   "cbc-des-caam",
2933 				.cra_blocksize = DES_BLOCK_SIZE,
2934 			},
2935 			.setkey = aead_setkey,
2936 			.setauthsize = aead_setauthsize,
2937 			.encrypt = aead_encrypt,
2938 			.decrypt = aead_decrypt,
2939 			.ivsize = DES_BLOCK_SIZE,
2940 			.maxauthsize = SHA512_DIGEST_SIZE,
2941 		},
2942 		.caam = {
2943 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2944 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2945 					   OP_ALG_AAI_HMAC_PRECOMP,
2946 		},
2947 	},
2948 	{
2949 		.aead = {
2950 			.base = {
2951 				.cra_name = "echainiv(authenc(hmac(sha512),"
2952 					    "cbc(des)))",
2953 				.cra_driver_name = "echainiv-authenc-"
2954 						   "hmac-sha512-cbc-des-caam",
2955 				.cra_blocksize = DES_BLOCK_SIZE,
2956 			},
2957 			.setkey = aead_setkey,
2958 			.setauthsize = aead_setauthsize,
2959 			.encrypt = aead_encrypt,
2960 			.decrypt = aead_decrypt,
2961 			.ivsize = DES_BLOCK_SIZE,
2962 			.maxauthsize = SHA512_DIGEST_SIZE,
2963 		},
2964 		.caam = {
2965 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2966 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2967 					   OP_ALG_AAI_HMAC_PRECOMP,
2968 			.geniv = true,
2969 		},
2970 	},
2971 	{
2972 		.aead = {
2973 			.base = {
2974 				.cra_name = "authenc(hmac(md5),"
2975 					    "rfc3686(ctr(aes)))",
2976 				.cra_driver_name = "authenc-hmac-md5-"
2977 						   "rfc3686-ctr-aes-caam",
2978 				.cra_blocksize = 1,
2979 			},
2980 			.setkey = aead_setkey,
2981 			.setauthsize = aead_setauthsize,
2982 			.encrypt = aead_encrypt,
2983 			.decrypt = aead_decrypt,
2984 			.ivsize = CTR_RFC3686_IV_SIZE,
2985 			.maxauthsize = MD5_DIGEST_SIZE,
2986 		},
2987 		.caam = {
2988 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2989 					   OP_ALG_AAI_CTR_MOD128,
2990 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2991 					   OP_ALG_AAI_HMAC_PRECOMP,
2992 			.rfc3686 = true,
2993 		},
2994 	},
2995 	{
2996 		.aead = {
2997 			.base = {
2998 				.cra_name = "seqiv(authenc("
2999 					    "hmac(md5),rfc3686(ctr(aes))))",
3000 				.cra_driver_name = "seqiv-authenc-hmac-md5-"
3001 						   "rfc3686-ctr-aes-caam",
3002 				.cra_blocksize = 1,
3003 			},
3004 			.setkey = aead_setkey,
3005 			.setauthsize = aead_setauthsize,
3006 			.encrypt = aead_encrypt,
3007 			.decrypt = aead_decrypt,
3008 			.ivsize = CTR_RFC3686_IV_SIZE,
3009 			.maxauthsize = MD5_DIGEST_SIZE,
3010 		},
3011 		.caam = {
3012 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3013 					   OP_ALG_AAI_CTR_MOD128,
3014 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
3015 					   OP_ALG_AAI_HMAC_PRECOMP,
3016 			.rfc3686 = true,
3017 			.geniv = true,
3018 		},
3019 	},
3020 	{
3021 		.aead = {
3022 			.base = {
3023 				.cra_name = "authenc(hmac(sha1),"
3024 					    "rfc3686(ctr(aes)))",
3025 				.cra_driver_name = "authenc-hmac-sha1-"
3026 						   "rfc3686-ctr-aes-caam",
3027 				.cra_blocksize = 1,
3028 			},
3029 			.setkey = aead_setkey,
3030 			.setauthsize = aead_setauthsize,
3031 			.encrypt = aead_encrypt,
3032 			.decrypt = aead_decrypt,
3033 			.ivsize = CTR_RFC3686_IV_SIZE,
3034 			.maxauthsize = SHA1_DIGEST_SIZE,
3035 		},
3036 		.caam = {
3037 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3038 					   OP_ALG_AAI_CTR_MOD128,
3039 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3040 					   OP_ALG_AAI_HMAC_PRECOMP,
3041 			.rfc3686 = true,
3042 		},
3043 	},
3044 	{
3045 		.aead = {
3046 			.base = {
3047 				.cra_name = "seqiv(authenc("
3048 					    "hmac(sha1),rfc3686(ctr(aes))))",
3049 				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
3050 						   "rfc3686-ctr-aes-caam",
3051 				.cra_blocksize = 1,
3052 			},
3053 			.setkey = aead_setkey,
3054 			.setauthsize = aead_setauthsize,
3055 			.encrypt = aead_encrypt,
3056 			.decrypt = aead_decrypt,
3057 			.ivsize = CTR_RFC3686_IV_SIZE,
3058 			.maxauthsize = SHA1_DIGEST_SIZE,
3059 		},
3060 		.caam = {
3061 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3062 					   OP_ALG_AAI_CTR_MOD128,
3063 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3064 					   OP_ALG_AAI_HMAC_PRECOMP,
3065 			.rfc3686 = true,
3066 			.geniv = true,
3067 		},
3068 	},
3069 	{
3070 		.aead = {
3071 			.base = {
3072 				.cra_name = "authenc(hmac(sha224),"
3073 					    "rfc3686(ctr(aes)))",
3074 				.cra_driver_name = "authenc-hmac-sha224-"
3075 						   "rfc3686-ctr-aes-caam",
3076 				.cra_blocksize = 1,
3077 			},
3078 			.setkey = aead_setkey,
3079 			.setauthsize = aead_setauthsize,
3080 			.encrypt = aead_encrypt,
3081 			.decrypt = aead_decrypt,
3082 			.ivsize = CTR_RFC3686_IV_SIZE,
3083 			.maxauthsize = SHA224_DIGEST_SIZE,
3084 		},
3085 		.caam = {
3086 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3087 					   OP_ALG_AAI_CTR_MOD128,
3088 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3089 					   OP_ALG_AAI_HMAC_PRECOMP,
3090 			.rfc3686 = true,
3091 		},
3092 	},
3093 	{
3094 		.aead = {
3095 			.base = {
3096 				.cra_name = "seqiv(authenc("
3097 					    "hmac(sha224),rfc3686(ctr(aes))))",
3098 				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
3099 						   "rfc3686-ctr-aes-caam",
3100 				.cra_blocksize = 1,
3101 			},
3102 			.setkey = aead_setkey,
3103 			.setauthsize = aead_setauthsize,
3104 			.encrypt = aead_encrypt,
3105 			.decrypt = aead_decrypt,
3106 			.ivsize = CTR_RFC3686_IV_SIZE,
3107 			.maxauthsize = SHA224_DIGEST_SIZE,
3108 		},
3109 		.caam = {
3110 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3111 					   OP_ALG_AAI_CTR_MOD128,
3112 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3113 					   OP_ALG_AAI_HMAC_PRECOMP,
3114 			.rfc3686 = true,
3115 			.geniv = true,
3116 		},
3117 	},
3118 	{
3119 		.aead = {
3120 			.base = {
3121 				.cra_name = "authenc(hmac(sha256),"
3122 					    "rfc3686(ctr(aes)))",
3123 				.cra_driver_name = "authenc-hmac-sha256-"
3124 						   "rfc3686-ctr-aes-caam",
3125 				.cra_blocksize = 1,
3126 			},
3127 			.setkey = aead_setkey,
3128 			.setauthsize = aead_setauthsize,
3129 			.encrypt = aead_encrypt,
3130 			.decrypt = aead_decrypt,
3131 			.ivsize = CTR_RFC3686_IV_SIZE,
3132 			.maxauthsize = SHA256_DIGEST_SIZE,
3133 		},
3134 		.caam = {
3135 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3136 					   OP_ALG_AAI_CTR_MOD128,
3137 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3138 					   OP_ALG_AAI_HMAC_PRECOMP,
3139 			.rfc3686 = true,
3140 		},
3141 	},
3142 	{
3143 		.aead = {
3144 			.base = {
3145 				.cra_name = "seqiv(authenc(hmac(sha256),"
3146 					    "rfc3686(ctr(aes))))",
3147 				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
3148 						   "rfc3686-ctr-aes-caam",
3149 				.cra_blocksize = 1,
3150 			},
3151 			.setkey = aead_setkey,
3152 			.setauthsize = aead_setauthsize,
3153 			.encrypt = aead_encrypt,
3154 			.decrypt = aead_decrypt,
3155 			.ivsize = CTR_RFC3686_IV_SIZE,
3156 			.maxauthsize = SHA256_DIGEST_SIZE,
3157 		},
3158 		.caam = {
3159 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3160 					   OP_ALG_AAI_CTR_MOD128,
3161 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3162 					   OP_ALG_AAI_HMAC_PRECOMP,
3163 			.rfc3686 = true,
3164 			.geniv = true,
3165 		},
3166 	},
3167 	{
3168 		.aead = {
3169 			.base = {
3170 				.cra_name = "authenc(hmac(sha384),"
3171 					    "rfc3686(ctr(aes)))",
3172 				.cra_driver_name = "authenc-hmac-sha384-"
3173 						   "rfc3686-ctr-aes-caam",
3174 				.cra_blocksize = 1,
3175 			},
3176 			.setkey = aead_setkey,
3177 			.setauthsize = aead_setauthsize,
3178 			.encrypt = aead_encrypt,
3179 			.decrypt = aead_decrypt,
3180 			.ivsize = CTR_RFC3686_IV_SIZE,
3181 			.maxauthsize = SHA384_DIGEST_SIZE,
3182 		},
3183 		.caam = {
3184 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3185 					   OP_ALG_AAI_CTR_MOD128,
3186 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3187 					   OP_ALG_AAI_HMAC_PRECOMP,
3188 			.rfc3686 = true,
3189 		},
3190 	},
3191 	{
3192 		.aead = {
3193 			.base = {
3194 				.cra_name = "seqiv(authenc(hmac(sha384),"
3195 					    "rfc3686(ctr(aes))))",
3196 				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
3197 						   "rfc3686-ctr-aes-caam",
3198 				.cra_blocksize = 1,
3199 			},
3200 			.setkey = aead_setkey,
3201 			.setauthsize = aead_setauthsize,
3202 			.encrypt = aead_encrypt,
3203 			.decrypt = aead_decrypt,
3204 			.ivsize = CTR_RFC3686_IV_SIZE,
3205 			.maxauthsize = SHA384_DIGEST_SIZE,
3206 		},
3207 		.caam = {
3208 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3209 					   OP_ALG_AAI_CTR_MOD128,
3210 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3211 					   OP_ALG_AAI_HMAC_PRECOMP,
3212 			.rfc3686 = true,
3213 			.geniv = true,
3214 		},
3215 	},
3216 	{
3217 		.aead = {
3218 			.base = {
3219 				.cra_name = "authenc(hmac(sha512),"
3220 					    "rfc3686(ctr(aes)))",
3221 				.cra_driver_name = "authenc-hmac-sha512-"
3222 						   "rfc3686-ctr-aes-caam",
3223 				.cra_blocksize = 1,
3224 			},
3225 			.setkey = aead_setkey,
3226 			.setauthsize = aead_setauthsize,
3227 			.encrypt = aead_encrypt,
3228 			.decrypt = aead_decrypt,
3229 			.ivsize = CTR_RFC3686_IV_SIZE,
3230 			.maxauthsize = SHA512_DIGEST_SIZE,
3231 		},
3232 		.caam = {
3233 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3234 					   OP_ALG_AAI_CTR_MOD128,
3235 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3236 					   OP_ALG_AAI_HMAC_PRECOMP,
3237 			.rfc3686 = true,
3238 		},
3239 	},
3240 	{
3241 		.aead = {
3242 			.base = {
3243 				.cra_name = "seqiv(authenc(hmac(sha512),"
3244 					    "rfc3686(ctr(aes))))",
3245 				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
3246 						   "rfc3686-ctr-aes-caam",
3247 				.cra_blocksize = 1,
3248 			},
3249 			.setkey = aead_setkey,
3250 			.setauthsize = aead_setauthsize,
3251 			.encrypt = aead_encrypt,
3252 			.decrypt = aead_decrypt,
3253 			.ivsize = CTR_RFC3686_IV_SIZE,
3254 			.maxauthsize = SHA512_DIGEST_SIZE,
3255 		},
3256 		.caam = {
3257 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3258 					   OP_ALG_AAI_CTR_MOD128,
3259 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3260 					   OP_ALG_AAI_HMAC_PRECOMP,
3261 			.rfc3686 = true,
3262 			.geniv = true,
3263 		},
3264 	},
3265 	{
3266 		.aead = {
3267 			.base = {
3268 				.cra_name = "rfc7539(chacha20,poly1305)",
3269 				.cra_driver_name = "rfc7539-chacha20-poly1305-"
3270 						   "caam",
3271 				.cra_blocksize = 1,
3272 			},
3273 			.setkey = chachapoly_setkey,
3274 			.setauthsize = chachapoly_setauthsize,
3275 			.encrypt = chachapoly_encrypt,
3276 			.decrypt = chachapoly_decrypt,
3277 			.ivsize = CHACHAPOLY_IV_SIZE,
3278 			.maxauthsize = POLY1305_DIGEST_SIZE,
3279 		},
3280 		.caam = {
3281 			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3282 					   OP_ALG_AAI_AEAD,
3283 			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3284 					   OP_ALG_AAI_AEAD,
3285 			.nodkp = true,
3286 		},
3287 	},
3288 	{
3289 		.aead = {
3290 			.base = {
3291 				.cra_name = "rfc7539esp(chacha20,poly1305)",
3292 				.cra_driver_name = "rfc7539esp-chacha20-"
3293 						   "poly1305-caam",
3294 				.cra_blocksize = 1,
3295 			},
3296 			.setkey = chachapoly_setkey,
3297 			.setauthsize = chachapoly_setauthsize,
3298 			.encrypt = chachapoly_encrypt,
3299 			.decrypt = chachapoly_decrypt,
3300 			.ivsize = 8,
3301 			.maxauthsize = POLY1305_DIGEST_SIZE,
3302 		},
3303 		.caam = {
3304 			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3305 					   OP_ALG_AAI_AEAD,
3306 			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3307 					   OP_ALG_AAI_AEAD,
3308 			.nodkp = true,
3309 		},
3310 	},
3311 };
3312 
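/*
 * A minimal usage sketch (not part of this file), assuming an in-place
 * gcm(aes) encryption with a 16-byte tag appended after the ciphertext.
 * The buffer layout is assoc || plaintext || room-for-tag and must be
 * DMA-able (e.g. kmalloc'd); example_gcm_seal() is an illustrative name.
 */
#include <crypto/aead.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

static int example_gcm_seal(u8 *buf, unsigned int assoclen,
			    unsigned int ptlen, const u8 *key,
			    unsigned int keylen, u8 iv[12])
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen);
	if (!err)
		err = crypto_aead_setauthsize(tfm, 16);
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* One in-place S/G entry covering AD, plaintext and tag room */
	sg_init_one(&sg, buf, assoclen + ptlen + 16);
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				  CRYPTO_TFM_REQ_MAY_SLEEP,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, assoclen);
	aead_request_set_crypt(req, &sg, &sg, ptlen, iv);

	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}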
3313 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3314 			    bool uses_dkp)
3315 {
3316 	dma_addr_t dma_addr;
3317 	struct caam_drv_private *priv;
3318 	const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
3319 						   sh_desc_enc);
3320 
3321 	ctx->jrdev = caam_jr_alloc();
3322 	if (IS_ERR(ctx->jrdev)) {
3323 		pr_err("Job Ring Device allocation for transform failed\n");
3324 		return PTR_ERR(ctx->jrdev);
3325 	}
3326 
3327 	priv = dev_get_drvdata(ctx->jrdev->parent);
3328 	if (priv->era >= 6 && uses_dkp)
3329 		ctx->dir = DMA_BIDIRECTIONAL;
3330 	else
3331 		ctx->dir = DMA_TO_DEVICE;
3332 
3333 	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3334 					offsetof(struct caam_ctx,
3335 						 sh_desc_enc_dma) -
3336 					sh_desc_enc_offset,
3337 					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3338 	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3339 		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3340 		caam_jr_free(ctx->jrdev);
3341 		return -ENOMEM;
3342 	}
3343 
3344 	ctx->sh_desc_enc_dma = dma_addr;
3345 	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3346 						   sh_desc_dec) -
3347 					sh_desc_enc_offset;
3348 	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
3349 					sh_desc_enc_offset;
3350 
3351 	/* copy descriptor header template value */
3352 	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3353 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3354 
3355 	return 0;
3356 }
3357 
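/*
 * The single mapping created above covers the three per-session blobs
 * laid out back to back in struct caam_ctx (see the definition at the
 * top of the file): sh_desc_enc[], sh_desc_dec[] and key[], i.e. the
 * region from offsetof(sh_desc_enc) up to offsetof(sh_desc_enc_dma).
 * The individual DMA addresses are then derived by offset arithmetic
 * rather than by three separate dma_map_single() calls.
 */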
3358 static int caam_cra_init(struct crypto_skcipher *tfm)
3359 {
3360 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3361 	struct caam_skcipher_alg *caam_alg =
3362 		container_of(alg, typeof(*caam_alg), skcipher);
3363 	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
3364 
3365 	crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
3366 
3367 	ctx->enginectx.op.do_one_request = skcipher_do_one_req;
3368 
3369 	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
3370 				false);
3371 }
3372 
3373 static int caam_aead_init(struct crypto_aead *tfm)
3374 {
3375 	struct aead_alg *alg = crypto_aead_alg(tfm);
3376 	struct caam_aead_alg *caam_alg =
3377 		 container_of(alg, struct caam_aead_alg, aead);
3378 	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3379 
3380 	crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));
3381 
3382 	ctx->enginectx.op.do_one_request = aead_do_one_req;
3383 
3384 	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
3385 }
3386 
3387 static void caam_exit_common(struct caam_ctx *ctx)
3388 {
3389 	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3390 			       offsetof(struct caam_ctx, sh_desc_enc_dma) -
3391 			       offsetof(struct caam_ctx, sh_desc_enc),
3392 			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3393 	caam_jr_free(ctx->jrdev);
3394 }
3395 
3396 static void caam_cra_exit(struct crypto_skcipher *tfm)
3397 {
3398 	caam_exit_common(crypto_skcipher_ctx(tfm));
3399 }
3400 
3401 static void caam_aead_exit(struct crypto_aead *tfm)
3402 {
3403 	caam_exit_common(crypto_aead_ctx(tfm));
3404 }
3405 
3406 void caam_algapi_exit(void)
3407 {
3408 	int i;
3409 
3410 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3411 		struct caam_aead_alg *t_alg = driver_aeads + i;
3412 
3413 		if (t_alg->registered)
3414 			crypto_unregister_aead(&t_alg->aead);
3415 	}
3416 
3417 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3418 		struct caam_skcipher_alg *t_alg = driver_algs + i;
3419 
3420 		if (t_alg->registered)
3421 			crypto_unregister_skcipher(&t_alg->skcipher);
3422 	}
3423 }
3424 
3425 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3426 {
3427 	struct skcipher_alg *alg = &t_alg->skcipher;
3428 
3429 	alg->base.cra_module = THIS_MODULE;
3430 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3431 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3432 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3433 
3434 	alg->init = caam_cra_init;
3435 	alg->exit = caam_cra_exit;
3436 }
3437 
3438 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3439 {
3440 	struct aead_alg *alg = &t_alg->aead;
3441 
3442 	alg->base.cra_module = THIS_MODULE;
3443 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3444 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3445 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3446 
3447 	alg->init = caam_aead_init;
3448 	alg->exit = caam_aead_exit;
3449 }
3450 
3451 int caam_algapi_init(struct device *ctrldev)
3452 {
3453 	struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
3454 	int i = 0, err = 0;
3455 	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
3456 	u32 arc4_inst;
3457 	unsigned int md_limit = SHA512_DIGEST_SIZE;
3458 	bool registered = false, gcm_support;
3459 
3460 	/*
3461 	 * Register crypto algorithms the device supports.
3462 	 * First, detect presence and attributes of DES, AES, and MD blocks.
3463 	 */
3464 	if (priv->era < 10) {
3465 		u32 cha_vid, cha_inst, aes_rn;
3466 
3467 		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3468 		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
3469 		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3470 
3471 		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3472 		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
3473 			   CHA_ID_LS_DES_SHIFT;
3474 		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
3475 		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3476 		arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >>
3477 			    CHA_ID_LS_ARC4_SHIFT;
3478 		ccha_inst = 0;
3479 		ptha_inst = 0;
3480 
3481 		aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
3482 			 CHA_ID_LS_AES_MASK;
3483 		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
3484 	} else {
3485 		u32 aesa, mdha;
3486 
3487 		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
3488 		mdha = rd_reg32(&priv->ctrl->vreg.mdha);
3489 
3490 		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3491 		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3492 
3493 		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
3494 		aes_inst = aesa & CHA_VER_NUM_MASK;
3495 		md_inst = mdha & CHA_VER_NUM_MASK;
3496 		ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
3497 		ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
3498 		arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK;
3499 
3500 		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
3501 	}
3502 
3503 	/* If MD is present, limit digest size based on LP256 */
3504 	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
3505 		md_limit = SHA256_DIGEST_SIZE;
3506 
3507 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3508 		struct caam_skcipher_alg *t_alg = driver_algs + i;
3509 		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
3510 
3511 		/* Skip DES algorithms if not supported by device */
3512 		if (!des_inst &&
3513 		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3514 		     (alg_sel == OP_ALG_ALGSEL_DES)))
3515 			continue;
3516 
3517 		/* Skip AES algorithms if not supported by device */
3518 		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3519 			continue;
3520 
3521 		/* Skip ARC4 algorithms if not supported by device */
3522 		if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4)
3523 			continue;
3524 
3525 		/*
3526 		 * Check support for AES modes not available
3527 		 * on LP devices.
3528 		 */
3529 		if (aes_vid == CHA_VER_VID_AES_LP &&
3530 		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
3531 		    OP_ALG_AAI_XTS)
3532 			continue;
3533 
3534 		caam_skcipher_alg_init(t_alg);
3535 
3536 		err = crypto_register_skcipher(&t_alg->skcipher);
3537 		if (err) {
3538 			pr_warn("%s alg registration failed\n",
3539 				t_alg->skcipher.base.cra_driver_name);
3540 			continue;
3541 		}
3542 
3543 		t_alg->registered = true;
3544 		registered = true;
3545 	}
3546 
3547 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3548 		struct caam_aead_alg *t_alg = driver_aeads + i;
3549 		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3550 				 OP_ALG_ALGSEL_MASK;
3551 		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3552 				 OP_ALG_ALGSEL_MASK;
3553 		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3554 
3555 		/* Skip DES algorithms if not supported by device */
3556 		if (!des_inst &&
3557 		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3558 		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3559 			continue;
3560 
3561 		/* Skip AES algorithms if not supported by device */
3562 		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3563 			continue;
3564 
3565 		/* Skip CHACHA20 algorithms if not supported by device */
3566 		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
3567 			continue;
3568 
3569 		/* Skip POLY1305 algorithms if not supported by device */
3570 		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
3571 			continue;
3572 
3573 		/* Skip GCM algorithms if not supported by device */
3574 		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
3575 		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
3576 			continue;
3577 
3578 		/*
3579 		 * Skip algorithms requiring message digests
3580 		 * if MD or MD size is not supported by device.
3581 		 */
3582 		if (is_mdha(c2_alg_sel) &&
3583 		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
3584 			continue;
3585 
3586 		caam_aead_alg_init(t_alg);
3587 
3588 		err = crypto_register_aead(&t_alg->aead);
3589 		if (err) {
3590 			pr_warn("%s alg registration failed\n",
3591 				t_alg->aead.base.cra_driver_name);
3592 			continue;
3593 		}
3594 
3595 		t_alg->registered = true;
3596 		registered = true;
3597 	}
3598 
3599 	if (registered)
3600 		pr_info("caam algorithms registered in /proc/crypto\n");
3601 
3602 	return err;
3603 }
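/*
 * Sanity check from userspace (illustrative): once caam_algapi_init()
 * has run, every transform that passed the capability checks above
 * shows up in /proc/crypto, e.g.:
 *
 *   $ grep -A2 'caam' /proc/crypto | head
 */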
3604