/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */

#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, rfc3686 nonce size and max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
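/*
 * Sizing sketch, assuming (per desc_constr.h) that CAAM_CMD_SZ is one 32-bit
 * command word (4 bytes) and CAAM_DESC_BYTES_MAX covers the 64-word
 * descriptor buffer (256 bytes): DESC_MAX_USED_BYTES is what remains for a
 * shared descriptor once DESC_JOB_IO_LEN bytes are reserved for the job
 * descriptor's I/O commands, and DESC_MAX_USED_LEN is that same budget
 * expressed in command words.
 */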

#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif

#ifdef DEBUG
#include <linux/highmem.h>

static void dbg_dump_sg(const char *level, const char *prefix_str,
			int prefix_type, int rowsize, int groupsize,
			struct scatterlist *sg, size_t tlen, bool ascii)
{
	struct scatterlist *it;
	void *it_page;
	size_t len;
	void *buf;

	for (it = sg; it != NULL && tlen > 0; it = sg_next(it)) {
		/*
		 * make sure the scatterlist's page
		 * has a valid virtual memory mapping
		 */
		it_page = kmap_atomic(sg_page(it));
		if (unlikely(!it_page)) {
			printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
			return;
		}

		buf = it_page + it->offset;
		len = min_t(size_t, tlen, it->length);
		print_hex_dump(level, prefix_str, prefix_type, rowsize,
			       groupsize, buf, len, ascii);
		tlen -= len;

		kunmap_atomic(it_page);
	}
}
#endif

static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
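/*
 * Layout of ctx->key as built by the setkey handlers below (authenc case):
 *
 *   [ authentication split key, padded to adata.keylen_pad ]
 *   [ encryption key, cdata.keylen bytes                   ]
 *
 * For rfc3686 algorithms the CTR nonce travels as the last
 * CTR_RFC3686_NONCE_SIZE bytes of the encryption key material.
 */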
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	dma_addr_t key_dma;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

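	/*
	 * desc_inline_query() sets one bit per data_len[] entry: bit 0 for
	 * the auth key (data_len[0]), bit 1 for the cipher key (data_len[1]).
	 * A set bit means that key still fits inline in the shared
	 * descriptor; a clear bit means it must be referenced by DMA address.
	 */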
	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ctx->authsize,
			       is_rfc3686, nonce, ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc,
				    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

static int aead_setkey(struct crypto_aead *aead,
			       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

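	/*
	 * Generate the authentication split key (the ipad/opad-processed
	 * form of the HMAC key that the CAAM consumes) at the start of
	 * ctx->key; the encryption key is copied right behind it below.
	 */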
	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* append the encryption key right after the auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, DMA_TO_DEVICE);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif
	ctx->cdata.keylen = keys.enckeylen;
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);
	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);
	return rfc4543_set_sh_desc(aead);
}

static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

	memcpy(ctx->key, key, keylen);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;
	cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
					ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

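	/*
	 * XTS uses two AES keys of equal size concatenated (key1 || key2),
	 * so only 2 * 128-bit and 2 * 256-bit key material is accepted here.
	 */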
	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
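	/*
	 * In-place (src == dst) requests were mapped bidirectionally, since
	 * the same pages are both read and written; otherwise src was mapped
	 * to-device and dst from-device, and each must be unmapped the same
	 * way it was mapped.
	 */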
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				   void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				   void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/* verify hw auth check passed else return -EBADMSG */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				   void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}

static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

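	/*
	 * The GCM IV is always fed to the CCB as 12 bytes: generic gcm(aes)
	 * supplies the full 12-byte IV itself, while the rfc4106-style
	 * algorithms provide an 8-byte IV that is prefixed below with the
	 * 4-byte salt kept at the end of ctx->key, giving salt || IV.
	 */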
	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
			 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

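	/*
	 * Load the IV as immediate data only when this job must supply it:
	 * always for non-geniv algorithms, but for geniv ones only in the
	 * rfc3686-encrypt case (the geniv shared descriptors otherwise
	 * handle the IV on their own).
	 */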
	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	pr_err("asked=%d, nbytes=%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
	dbg_dump_sg(KERN_ERR, "src    @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

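	/*
	 * Output pointer selection: for in-place requests going through the
	 * link table, start one entry past the IV so only the payload
	 * entries are written back; a distinct destination uses its own
	 * entries starting at sec4_sg_index.
	 */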
	if (likely(req->src == req->dst)) {
		if (edesc->src_nents == 1 && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}

/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__) ": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (edesc->src_nents == 1) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
		       CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
						(encrypt ? authsize :
							   (-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

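	/*
	 * If both source and destination are single segments, no h/w link
	 * table is needed at all; hand the edesc back without mapping one.
	 */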
	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}

static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
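	/*
	 * In the IPsec case the rfc4106 AAD starts with the 8-byte ESP
	 * SPI + sequence number (12 bytes with extended sequence numbers),
	 * so anything shorter than 8 bytes cannot be a valid request.
	 */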
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}

static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}

static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

#ifdef DEBUG
	dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    req->assoclen + req->cryptlen, 1);
#endif

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map the ablkcipher extended descriptor
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool in_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

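	/*
	 * If the IV landed in DMA space immediately before a single-segment
	 * source, the h/w can consume IV || src as one contiguous input;
	 * otherwise build a link table with the IV as its first entry.
	 */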
1535 	if (mapped_src_nents == 1 &&
1536 	    iv_dma + ivsize == sg_dma_address(req->src)) {
1537 		in_contig = true;
1538 		sec4_sg_ents = 0;
1539 	} else {
1540 		in_contig = false;
1541 		sec4_sg_ents = 1 + mapped_src_nents;
1542 	}
1543 	dst_sg_idx = sec4_sg_ents;
1544 	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1545 	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1546 
1547 	/* allocate space for base edesc and hw desc commands, link tables */
1548 	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1549 			GFP_DMA | flags);
1550 	if (!edesc) {
1551 		dev_err(jrdev, "could not allocate extended descriptor\n");
1552 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1553 			   iv_dma, ivsize, 0, 0);
1554 		return ERR_PTR(-ENOMEM);
1555 	}
1556 
1557 	edesc->src_nents = src_nents;
1558 	edesc->dst_nents = dst_nents;
1559 	edesc->sec4_sg_bytes = sec4_sg_bytes;
1560 	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
1561 			 desc_bytes;
1562 
1563 	if (!in_contig) {
1564 		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1565 		sg_to_sec4_sg_last(req->src, mapped_src_nents,
1566 				   edesc->sec4_sg + 1, 0);
1567 	}
1568 
1569 	if (mapped_dst_nents > 1) {
1570 		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1571 				   edesc->sec4_sg + dst_sg_idx, 0);
1572 	}
1573 
1574 	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1575 					    sec4_sg_bytes, DMA_TO_DEVICE);
1576 	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1577 		dev_err(jrdev, "unable to map S/G table\n");
1578 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1579 			   iv_dma, ivsize, 0, 0);
1580 		kfree(edesc);
1581 		return ERR_PTR(-ENOMEM);
1582 	}
1583 
1584 	edesc->iv_dma = iv_dma;
1585 
1586 #ifdef DEBUG
1587 	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
1588 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1589 		       sec4_sg_bytes, 1);
1590 #endif
1591 
1592 	*iv_contig_out = in_contig;
1593 	return edesc;
1594 }
1595 
1596 static int ablkcipher_encrypt(struct ablkcipher_request *req)
1597 {
1598 	struct ablkcipher_edesc *edesc;
1599 	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1600 	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1601 	struct device *jrdev = ctx->jrdev;
1602 	bool iv_contig;
1603 	u32 *desc;
1604 	int ret = 0;
1605 
1606 	/* allocate extended descriptor */
1607 	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
1608 				       CAAM_CMD_SZ, &iv_contig);
1609 	if (IS_ERR(edesc))
1610 		return PTR_ERR(edesc);
1611 
1612 	/* Create and submit job descriptor*/
1613 	init_ablkcipher_job(ctx->sh_desc_enc,
1614 		ctx->sh_desc_enc_dma, edesc, req, iv_contig);
1615 #ifdef DEBUG
1616 	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
1617 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1618 		       desc_bytes(edesc->hw_desc), 1);
1619 #endif
1620 	desc = edesc->hw_desc;
1621 	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1622 
1623 	if (!ret) {
1624 		ret = -EINPROGRESS;
1625 	} else {
1626 		ablkcipher_unmap(jrdev, edesc, req);
1627 		kfree(edesc);
1628 	}
1629 
1630 	return ret;
1631 }
1632 
1633 static int ablkcipher_decrypt(struct ablkcipher_request *req)
1634 {
1635 	struct ablkcipher_edesc *edesc;
1636 	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1637 	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1638 	struct device *jrdev = ctx->jrdev;
1639 	bool iv_contig;
1640 	u32 *desc;
1641 	int ret = 0;
1642 
1643 	/* allocate extended descriptor */
1644 	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
1645 				       CAAM_CMD_SZ, &iv_contig);
1646 	if (IS_ERR(edesc))
1647 		return PTR_ERR(edesc);
1648 
1649 	/* Create and submit job descriptor*/
1650 	init_ablkcipher_job(ctx->sh_desc_dec,
1651 		ctx->sh_desc_dec_dma, edesc, req, iv_contig);
1652 	desc = edesc->hw_desc;
1653 #ifdef DEBUG
1654 	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
1655 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1656 		       desc_bytes(edesc->hw_desc), 1);
1657 #endif
1658 
1659 	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
1660 	if (!ret) {
1661 		ret = -EINPROGRESS;
1662 	} else {
1663 		ablkcipher_unmap(jrdev, edesc, req);
1664 		kfree(edesc);
1665 	}
1666 
1667 	return ret;
1668 }
1669 
1670 /*
1671  * allocate and map the ablkcipher extended descriptor
1672  * for ablkcipher givencrypt
1673  */
1674 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
1675 				struct skcipher_givcrypt_request *greq,
1676 				int desc_bytes,
1677 				bool *iv_contig_out)
1678 {
1679 	struct ablkcipher_request *req = &greq->creq;
1680 	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1681 	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1682 	struct device *jrdev = ctx->jrdev;
1683 	gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
1684 					  CRYPTO_TFM_REQ_MAY_SLEEP)) ?
1685 		       GFP_KERNEL : GFP_ATOMIC;
1686 	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
1687 	struct ablkcipher_edesc *edesc;
1688 	dma_addr_t iv_dma = 0;
1689 	bool out_contig;
1690 	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1691 	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1692 
1693 	src_nents = sg_nents_for_len(req->src, req->nbytes);
1694 	if (unlikely(src_nents < 0)) {
1695 		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1696 			req->nbytes);
1697 		return ERR_PTR(src_nents);
1698 	}
1699 
1700 	if (likely(req->src == req->dst)) {
1701 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1702 					      DMA_BIDIRECTIONAL);
1703 		if (unlikely(!mapped_src_nents)) {
1704 			dev_err(jrdev, "unable to map source\n");
1705 			return ERR_PTR(-ENOMEM);
1706 		}
1707 
1708 		dst_nents = src_nents;
1709 		mapped_dst_nents = src_nents;
1710 	} else {
1711 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1712 					      DMA_TO_DEVICE);
1713 		if (unlikely(!mapped_src_nents)) {
1714 			dev_err(jrdev, "unable to map source\n");
1715 			return ERR_PTR(-ENOMEM);
1716 		}
1717 
1718 		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
1719 		if (unlikely(dst_nents < 0)) {
1720 			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1721 				req->nbytes);
1722 			return ERR_PTR(dst_nents);
1723 		}
1724 
1725 		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1726 					      DMA_FROM_DEVICE);
1727 		if (unlikely(!mapped_dst_nents)) {
1728 			dev_err(jrdev, "unable to map destination\n");
1729 			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1730 			return ERR_PTR(-ENOMEM);
1731 		}
1732 	}
1733 
1734 	/*
1735 	 * Check if iv can be contiguous with source and destination.
1736 	 * If so, include it. If not, create scatterlist.
1737 	 */
1738 	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
1739 	if (dma_mapping_error(jrdev, iv_dma)) {
1740 		dev_err(jrdev, "unable to map IV\n");
1741 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1742 			   0, 0, 0);
1743 		return ERR_PTR(-ENOMEM);
1744 	}
1745 
1746 	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
1747 	dst_sg_idx = sec4_sg_ents;
1748 	if (mapped_dst_nents == 1 &&
1749 	    iv_dma + ivsize == sg_dma_address(req->dst)) {
1750 		out_contig = true;
1751 	} else {
1752 		out_contig = false;
1753 		sec4_sg_ents += 1 + mapped_dst_nents;
1754 	}
1755 
1756 	/* allocate space for base edesc and hw desc commands, link tables */
1757 	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1758 	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1759 			GFP_DMA | flags);
1760 	if (!edesc) {
1761 		dev_err(jrdev, "could not allocate extended descriptor\n");
1762 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1763 			   iv_dma, ivsize, 0, 0);
1764 		return ERR_PTR(-ENOMEM);
1765 	}
1766 
1767 	edesc->src_nents = src_nents;
1768 	edesc->dst_nents = dst_nents;
1769 	edesc->sec4_sg_bytes = sec4_sg_bytes;
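	/*
	 * The S/G table lives in the same allocation, immediately after the
	 * base edesc and the hardware job descriptor.
	 */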
1770 	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
1771 			 desc_bytes;
1772 
1773 	if (mapped_src_nents > 1)
1774 		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
1775 				   0);
1776 
1777 	if (!out_contig) {
1778 		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx,
1779 				   iv_dma, ivsize, 0);
1780 		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1781 				   edesc->sec4_sg + dst_sg_idx + 1, 0);
1782 	}
1783 
1784 	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1785 					    sec4_sg_bytes, DMA_TO_DEVICE);
1786 	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1787 		dev_err(jrdev, "unable to map S/G table\n");
1788 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1789 			   iv_dma, ivsize, 0, 0);
1790 		kfree(edesc);
1791 		return ERR_PTR(-ENOMEM);
1792 	}
1793 	edesc->iv_dma = iv_dma;
1794 
1795 #ifdef DEBUG
1796 	print_hex_dump(KERN_ERR,
1797 		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
1798 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1799 		       sec4_sg_bytes, 1);
1800 #endif
1801 
1802 	*iv_contig_out = out_contig;
1803 	return edesc;
1804 }
1805 
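/*
 * givencrypt entry point: generate the IV and encrypt in a single pass.
 * A job descriptor is built around the givenc shared descriptor and
 * enqueued on the job ring; on success -EINPROGRESS is returned and
 * completion is signalled through ablkcipher_encrypt_done().
 */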
1806 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
1807 {
1808 	struct ablkcipher_request *req = &creq->creq;
1809 	struct ablkcipher_edesc *edesc;
1810 	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1811 	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1812 	struct device *jrdev = ctx->jrdev;
1813 	bool iv_contig = false;
1814 	u32 *desc;
1815 	int ret = 0;
1816 
1817 	/* allocate extended descriptor */
1818 	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
1819 				       CAAM_CMD_SZ, &iv_contig);
1820 	if (IS_ERR(edesc))
1821 		return PTR_ERR(edesc);
1822 
1823 	/* Create and submit job descriptor */
1824 	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
1825 				edesc, req, iv_contig);
1826 #ifdef DEBUG
1827 	print_hex_dump(KERN_ERR,
1828 		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
1829 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1830 		       desc_bytes(edesc->hw_desc), 1);
1831 #endif
1832 	desc = edesc->hw_desc;
1833 	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1834 
1835 	if (!ret) {
1836 		ret = -EINPROGRESS;
1837 	} else {
1838 		ablkcipher_unmap(jrdev, edesc, req);
1839 		kfree(edesc);
1840 	}
1841 
1842 	return ret;
1843 }
1844 
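/*
 * Registration templates for the (giv)ablkcipher algorithms; the AEAD
 * algorithms are described separately in the aead_alg-based driver_aeads
 * table further down.
 */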
1845 #define template_aead		template_u.aead
1846 #define template_ablkcipher	template_u.ablkcipher
1847 struct caam_alg_template {
1848 	char name[CRYPTO_MAX_ALG_NAME];
1849 	char driver_name[CRYPTO_MAX_ALG_NAME];
1850 	unsigned int blocksize;
1851 	u32 type;
1852 	union {
1853 		struct ablkcipher_alg ablkcipher;
1854 	} template_u;
1855 	u32 class1_alg_type;
1856 	u32 class2_alg_type;
1857 };
1858 
1859 static struct caam_alg_template driver_algs[] = {
1860 	/* ablkcipher descriptor */
1861 	{
1862 		.name = "cbc(aes)",
1863 		.driver_name = "cbc-aes-caam",
1864 		.blocksize = AES_BLOCK_SIZE,
1865 		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1866 		.template_ablkcipher = {
1867 			.setkey = ablkcipher_setkey,
1868 			.encrypt = ablkcipher_encrypt,
1869 			.decrypt = ablkcipher_decrypt,
1870 			.givencrypt = ablkcipher_givencrypt,
1871 			.geniv = "<built-in>",
1872 			.min_keysize = AES_MIN_KEY_SIZE,
1873 			.max_keysize = AES_MAX_KEY_SIZE,
1874 			.ivsize = AES_BLOCK_SIZE,
1875 			},
1876 		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1877 	},
1878 	{
1879 		.name = "cbc(des3_ede)",
1880 		.driver_name = "cbc-3des-caam",
1881 		.blocksize = DES3_EDE_BLOCK_SIZE,
1882 		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1883 		.template_ablkcipher = {
1884 			.setkey = ablkcipher_setkey,
1885 			.encrypt = ablkcipher_encrypt,
1886 			.decrypt = ablkcipher_decrypt,
1887 			.givencrypt = ablkcipher_givencrypt,
1888 			.geniv = "<built-in>",
1889 			.min_keysize = DES3_EDE_KEY_SIZE,
1890 			.max_keysize = DES3_EDE_KEY_SIZE,
1891 			.ivsize = DES3_EDE_BLOCK_SIZE,
1892 			},
1893 		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1894 	},
1895 	{
1896 		.name = "cbc(des)",
1897 		.driver_name = "cbc-des-caam",
1898 		.blocksize = DES_BLOCK_SIZE,
1899 		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1900 		.template_ablkcipher = {
1901 			.setkey = ablkcipher_setkey,
1902 			.encrypt = ablkcipher_encrypt,
1903 			.decrypt = ablkcipher_decrypt,
1904 			.givencrypt = ablkcipher_givencrypt,
1905 			.geniv = "<built-in>",
1906 			.min_keysize = DES_KEY_SIZE,
1907 			.max_keysize = DES_KEY_SIZE,
1908 			.ivsize = DES_BLOCK_SIZE,
1909 			},
1910 		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1911 	},
1912 	{
1913 		.name = "ctr(aes)",
1914 		.driver_name = "ctr-aes-caam",
1915 		.blocksize = 1,
1916 		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1917 		.template_ablkcipher = {
1918 			.setkey = ablkcipher_setkey,
1919 			.encrypt = ablkcipher_encrypt,
1920 			.decrypt = ablkcipher_decrypt,
1921 			.geniv = "chainiv",
1922 			.min_keysize = AES_MIN_KEY_SIZE,
1923 			.max_keysize = AES_MAX_KEY_SIZE,
1924 			.ivsize = AES_BLOCK_SIZE,
1925 			},
1926 		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
1927 	},
1928 	{
1929 		.name = "rfc3686(ctr(aes))",
1930 		.driver_name = "rfc3686-ctr-aes-caam",
1931 		.blocksize = 1,
1932 		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1933 		.template_ablkcipher = {
1934 			.setkey = ablkcipher_setkey,
1935 			.encrypt = ablkcipher_encrypt,
1936 			.decrypt = ablkcipher_decrypt,
1937 			.givencrypt = ablkcipher_givencrypt,
1938 			.geniv = "<built-in>",
1939 			.min_keysize = AES_MIN_KEY_SIZE +
1940 				       CTR_RFC3686_NONCE_SIZE,
1941 			.max_keysize = AES_MAX_KEY_SIZE +
1942 				       CTR_RFC3686_NONCE_SIZE,
1943 			.ivsize = CTR_RFC3686_IV_SIZE,
1944 			},
1945 		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
1946 	},
1947 	{
1948 		.name = "xts(aes)",
1949 		.driver_name = "xts-aes-caam",
1950 		.blocksize = AES_BLOCK_SIZE,
1951 		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1952 		.template_ablkcipher = {
1953 			.setkey = xts_ablkcipher_setkey,
1954 			.encrypt = ablkcipher_encrypt,
1955 			.decrypt = ablkcipher_decrypt,
1956 			.geniv = "eseqiv",
1957 			.min_keysize = 2 * AES_MIN_KEY_SIZE,
1958 			.max_keysize = 2 * AES_MAX_KEY_SIZE,
1959 			.ivsize = AES_BLOCK_SIZE,
1960 			},
1961 		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1962 	},
1963 };
1964 
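/*
 * In the .caam entries below, .geniv = true marks the IV-generating
 * (echainiv/seqiv-wrapped) variants, and .rfc3686 = true marks CTR
 * (RFC3686) variants whose key material carries the 4-byte nonce.
 */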
1965 static struct caam_aead_alg driver_aeads[] = {
1966 	{
1967 		.aead = {
1968 			.base = {
1969 				.cra_name = "rfc4106(gcm(aes))",
1970 				.cra_driver_name = "rfc4106-gcm-aes-caam",
1971 				.cra_blocksize = 1,
1972 			},
1973 			.setkey = rfc4106_setkey,
1974 			.setauthsize = rfc4106_setauthsize,
1975 			.encrypt = ipsec_gcm_encrypt,
1976 			.decrypt = ipsec_gcm_decrypt,
1977 			.ivsize = 8,
1978 			.maxauthsize = AES_BLOCK_SIZE,
1979 		},
1980 		.caam = {
1981 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1982 		},
1983 	},
1984 	{
1985 		.aead = {
1986 			.base = {
1987 				.cra_name = "rfc4543(gcm(aes))",
1988 				.cra_driver_name = "rfc4543-gcm-aes-caam",
1989 				.cra_blocksize = 1,
1990 			},
1991 			.setkey = rfc4543_setkey,
1992 			.setauthsize = rfc4543_setauthsize,
1993 			.encrypt = ipsec_gcm_encrypt,
1994 			.decrypt = ipsec_gcm_decrypt,
1995 			.ivsize = 8,
1996 			.maxauthsize = AES_BLOCK_SIZE,
1997 		},
1998 		.caam = {
1999 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2000 		},
2001 	},
2002 	/* Galois Counter Mode */
2003 	{
2004 		.aead = {
2005 			.base = {
2006 				.cra_name = "gcm(aes)",
2007 				.cra_driver_name = "gcm-aes-caam",
2008 				.cra_blocksize = 1,
2009 			},
2010 			.setkey = gcm_setkey,
2011 			.setauthsize = gcm_setauthsize,
2012 			.encrypt = gcm_encrypt,
2013 			.decrypt = gcm_decrypt,
2014 			.ivsize = 12,
2015 			.maxauthsize = AES_BLOCK_SIZE,
2016 		},
2017 		.caam = {
2018 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2019 		},
2020 	},
2021 	/* single-pass ipsec_esp descriptor */
2022 	{
2023 		.aead = {
2024 			.base = {
2025 				.cra_name = "authenc(hmac(md5),"
2026 					    "ecb(cipher_null))",
2027 				.cra_driver_name = "authenc-hmac-md5-"
2028 						   "ecb-cipher_null-caam",
2029 				.cra_blocksize = NULL_BLOCK_SIZE,
2030 			},
2031 			.setkey = aead_setkey,
2032 			.setauthsize = aead_setauthsize,
2033 			.encrypt = aead_encrypt,
2034 			.decrypt = aead_decrypt,
2035 			.ivsize = NULL_IV_SIZE,
2036 			.maxauthsize = MD5_DIGEST_SIZE,
2037 		},
2038 		.caam = {
2039 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2040 					   OP_ALG_AAI_HMAC_PRECOMP,
2041 		},
2042 	},
2043 	{
2044 		.aead = {
2045 			.base = {
2046 				.cra_name = "authenc(hmac(sha1),"
2047 					    "ecb(cipher_null))",
2048 				.cra_driver_name = "authenc-hmac-sha1-"
2049 						   "ecb-cipher_null-caam",
2050 				.cra_blocksize = NULL_BLOCK_SIZE,
2051 			},
2052 			.setkey = aead_setkey,
2053 			.setauthsize = aead_setauthsize,
2054 			.encrypt = aead_encrypt,
2055 			.decrypt = aead_decrypt,
2056 			.ivsize = NULL_IV_SIZE,
2057 			.maxauthsize = SHA1_DIGEST_SIZE,
2058 		},
2059 		.caam = {
2060 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2061 					   OP_ALG_AAI_HMAC_PRECOMP,
2062 		},
2063 	},
2064 	{
2065 		.aead = {
2066 			.base = {
2067 				.cra_name = "authenc(hmac(sha224),"
2068 					    "ecb(cipher_null))",
2069 				.cra_driver_name = "authenc-hmac-sha224-"
2070 						   "ecb-cipher_null-caam",
2071 				.cra_blocksize = NULL_BLOCK_SIZE,
2072 			},
2073 			.setkey = aead_setkey,
2074 			.setauthsize = aead_setauthsize,
2075 			.encrypt = aead_encrypt,
2076 			.decrypt = aead_decrypt,
2077 			.ivsize = NULL_IV_SIZE,
2078 			.maxauthsize = SHA224_DIGEST_SIZE,
2079 		},
2080 		.caam = {
2081 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2082 					   OP_ALG_AAI_HMAC_PRECOMP,
2083 		},
2084 	},
2085 	{
2086 		.aead = {
2087 			.base = {
2088 				.cra_name = "authenc(hmac(sha256),"
2089 					    "ecb(cipher_null))",
2090 				.cra_driver_name = "authenc-hmac-sha256-"
2091 						   "ecb-cipher_null-caam",
2092 				.cra_blocksize = NULL_BLOCK_SIZE,
2093 			},
2094 			.setkey = aead_setkey,
2095 			.setauthsize = aead_setauthsize,
2096 			.encrypt = aead_encrypt,
2097 			.decrypt = aead_decrypt,
2098 			.ivsize = NULL_IV_SIZE,
2099 			.maxauthsize = SHA256_DIGEST_SIZE,
2100 		},
2101 		.caam = {
2102 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2103 					   OP_ALG_AAI_HMAC_PRECOMP,
2104 		},
2105 	},
2106 	{
2107 		.aead = {
2108 			.base = {
2109 				.cra_name = "authenc(hmac(sha384),"
2110 					    "ecb(cipher_null))",
2111 				.cra_driver_name = "authenc-hmac-sha384-"
2112 						   "ecb-cipher_null-caam",
2113 				.cra_blocksize = NULL_BLOCK_SIZE,
2114 			},
2115 			.setkey = aead_setkey,
2116 			.setauthsize = aead_setauthsize,
2117 			.encrypt = aead_encrypt,
2118 			.decrypt = aead_decrypt,
2119 			.ivsize = NULL_IV_SIZE,
2120 			.maxauthsize = SHA384_DIGEST_SIZE,
2121 		},
2122 		.caam = {
2123 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2124 					   OP_ALG_AAI_HMAC_PRECOMP,
2125 		},
2126 	},
2127 	{
2128 		.aead = {
2129 			.base = {
2130 				.cra_name = "authenc(hmac(sha512),"
2131 					    "ecb(cipher_null))",
2132 				.cra_driver_name = "authenc-hmac-sha512-"
2133 						   "ecb-cipher_null-caam",
2134 				.cra_blocksize = NULL_BLOCK_SIZE,
2135 			},
2136 			.setkey = aead_setkey,
2137 			.setauthsize = aead_setauthsize,
2138 			.encrypt = aead_encrypt,
2139 			.decrypt = aead_decrypt,
2140 			.ivsize = NULL_IV_SIZE,
2141 			.maxauthsize = SHA512_DIGEST_SIZE,
2142 		},
2143 		.caam = {
2144 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2145 					   OP_ALG_AAI_HMAC_PRECOMP,
2146 		},
2147 	},
2148 	{
2149 		.aead = {
2150 			.base = {
2151 				.cra_name = "authenc(hmac(md5),cbc(aes))",
2152 				.cra_driver_name = "authenc-hmac-md5-"
2153 						   "cbc-aes-caam",
2154 				.cra_blocksize = AES_BLOCK_SIZE,
2155 			},
2156 			.setkey = aead_setkey,
2157 			.setauthsize = aead_setauthsize,
2158 			.encrypt = aead_encrypt,
2159 			.decrypt = aead_decrypt,
2160 			.ivsize = AES_BLOCK_SIZE,
2161 			.maxauthsize = MD5_DIGEST_SIZE,
2162 		},
2163 		.caam = {
2164 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2165 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2166 					   OP_ALG_AAI_HMAC_PRECOMP,
2167 		},
2168 	},
2169 	{
2170 		.aead = {
2171 			.base = {
2172 				.cra_name = "echainiv(authenc(hmac(md5),"
2173 					    "cbc(aes)))",
2174 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2175 						   "cbc-aes-caam",
2176 				.cra_blocksize = AES_BLOCK_SIZE,
2177 			},
2178 			.setkey = aead_setkey,
2179 			.setauthsize = aead_setauthsize,
2180 			.encrypt = aead_encrypt,
2181 			.decrypt = aead_decrypt,
2182 			.ivsize = AES_BLOCK_SIZE,
2183 			.maxauthsize = MD5_DIGEST_SIZE,
2184 		},
2185 		.caam = {
2186 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2187 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2188 					   OP_ALG_AAI_HMAC_PRECOMP,
2189 			.geniv = true,
2190 		},
2191 	},
2192 	{
2193 		.aead = {
2194 			.base = {
2195 				.cra_name = "authenc(hmac(sha1),cbc(aes))",
2196 				.cra_driver_name = "authenc-hmac-sha1-"
2197 						   "cbc-aes-caam",
2198 				.cra_blocksize = AES_BLOCK_SIZE,
2199 			},
2200 			.setkey = aead_setkey,
2201 			.setauthsize = aead_setauthsize,
2202 			.encrypt = aead_encrypt,
2203 			.decrypt = aead_decrypt,
2204 			.ivsize = AES_BLOCK_SIZE,
2205 			.maxauthsize = SHA1_DIGEST_SIZE,
2206 		},
2207 		.caam = {
2208 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2209 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2210 					   OP_ALG_AAI_HMAC_PRECOMP,
2211 		},
2212 	},
2213 	{
2214 		.aead = {
2215 			.base = {
2216 				.cra_name = "echainiv(authenc(hmac(sha1),"
2217 					    "cbc(aes)))",
2218 				.cra_driver_name = "echainiv-authenc-"
2219 						   "hmac-sha1-cbc-aes-caam",
2220 				.cra_blocksize = AES_BLOCK_SIZE,
2221 			},
2222 			.setkey = aead_setkey,
2223 			.setauthsize = aead_setauthsize,
2224 			.encrypt = aead_encrypt,
2225 			.decrypt = aead_decrypt,
2226 			.ivsize = AES_BLOCK_SIZE,
2227 			.maxauthsize = SHA1_DIGEST_SIZE,
2228 		},
2229 		.caam = {
2230 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2231 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2232 					   OP_ALG_AAI_HMAC_PRECOMP,
2233 			.geniv = true,
2234 		},
2235 	},
2236 	{
2237 		.aead = {
2238 			.base = {
2239 				.cra_name = "authenc(hmac(sha224),cbc(aes))",
2240 				.cra_driver_name = "authenc-hmac-sha224-"
2241 						   "cbc-aes-caam",
2242 				.cra_blocksize = AES_BLOCK_SIZE,
2243 			},
2244 			.setkey = aead_setkey,
2245 			.setauthsize = aead_setauthsize,
2246 			.encrypt = aead_encrypt,
2247 			.decrypt = aead_decrypt,
2248 			.ivsize = AES_BLOCK_SIZE,
2249 			.maxauthsize = SHA224_DIGEST_SIZE,
2250 		},
2251 		.caam = {
2252 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2253 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2254 					   OP_ALG_AAI_HMAC_PRECOMP,
2255 		},
2256 	},
2257 	{
2258 		.aead = {
2259 			.base = {
2260 				.cra_name = "echainiv(authenc(hmac(sha224),"
2261 					    "cbc(aes)))",
2262 				.cra_driver_name = "echainiv-authenc-"
2263 						   "hmac-sha224-cbc-aes-caam",
2264 				.cra_blocksize = AES_BLOCK_SIZE,
2265 			},
2266 			.setkey = aead_setkey,
2267 			.setauthsize = aead_setauthsize,
2268 			.encrypt = aead_encrypt,
2269 			.decrypt = aead_decrypt,
2270 			.ivsize = AES_BLOCK_SIZE,
2271 			.maxauthsize = SHA224_DIGEST_SIZE,
2272 		},
2273 		.caam = {
2274 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2275 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2276 					   OP_ALG_AAI_HMAC_PRECOMP,
2277 			.geniv = true,
2278 		},
2279 	},
2280 	{
2281 		.aead = {
2282 			.base = {
2283 				.cra_name = "authenc(hmac(sha256),cbc(aes))",
2284 				.cra_driver_name = "authenc-hmac-sha256-"
2285 						   "cbc-aes-caam",
2286 				.cra_blocksize = AES_BLOCK_SIZE,
2287 			},
2288 			.setkey = aead_setkey,
2289 			.setauthsize = aead_setauthsize,
2290 			.encrypt = aead_encrypt,
2291 			.decrypt = aead_decrypt,
2292 			.ivsize = AES_BLOCK_SIZE,
2293 			.maxauthsize = SHA256_DIGEST_SIZE,
2294 		},
2295 		.caam = {
2296 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2297 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2298 					   OP_ALG_AAI_HMAC_PRECOMP,
2299 		},
2300 	},
2301 	{
2302 		.aead = {
2303 			.base = {
2304 				.cra_name = "echainiv(authenc(hmac(sha256),"
2305 					    "cbc(aes)))",
2306 				.cra_driver_name = "echainiv-authenc-"
2307 						   "hmac-sha256-cbc-aes-caam",
2308 				.cra_blocksize = AES_BLOCK_SIZE,
2309 			},
2310 			.setkey = aead_setkey,
2311 			.setauthsize = aead_setauthsize,
2312 			.encrypt = aead_encrypt,
2313 			.decrypt = aead_decrypt,
2314 			.ivsize = AES_BLOCK_SIZE,
2315 			.maxauthsize = SHA256_DIGEST_SIZE,
2316 		},
2317 		.caam = {
2318 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2319 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2320 					   OP_ALG_AAI_HMAC_PRECOMP,
2321 			.geniv = true,
2322 		},
2323 	},
2324 	{
2325 		.aead = {
2326 			.base = {
2327 				.cra_name = "authenc(hmac(sha384),cbc(aes))",
2328 				.cra_driver_name = "authenc-hmac-sha384-"
2329 						   "cbc-aes-caam",
2330 				.cra_blocksize = AES_BLOCK_SIZE,
2331 			},
2332 			.setkey = aead_setkey,
2333 			.setauthsize = aead_setauthsize,
2334 			.encrypt = aead_encrypt,
2335 			.decrypt = aead_decrypt,
2336 			.ivsize = AES_BLOCK_SIZE,
2337 			.maxauthsize = SHA384_DIGEST_SIZE,
2338 		},
2339 		.caam = {
2340 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2341 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2342 					   OP_ALG_AAI_HMAC_PRECOMP,
2343 		},
2344 	},
2345 	{
2346 		.aead = {
2347 			.base = {
2348 				.cra_name = "echainiv(authenc(hmac(sha384),"
2349 					    "cbc(aes)))",
2350 				.cra_driver_name = "echainiv-authenc-"
2351 						   "hmac-sha384-cbc-aes-caam",
2352 				.cra_blocksize = AES_BLOCK_SIZE,
2353 			},
2354 			.setkey = aead_setkey,
2355 			.setauthsize = aead_setauthsize,
2356 			.encrypt = aead_encrypt,
2357 			.decrypt = aead_decrypt,
2358 			.ivsize = AES_BLOCK_SIZE,
2359 			.maxauthsize = SHA384_DIGEST_SIZE,
2360 		},
2361 		.caam = {
2362 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2363 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2364 					   OP_ALG_AAI_HMAC_PRECOMP,
2365 			.geniv = true,
2366 		},
2367 	},
2368 	{
2369 		.aead = {
2370 			.base = {
2371 				.cra_name = "authenc(hmac(sha512),cbc(aes))",
2372 				.cra_driver_name = "authenc-hmac-sha512-"
2373 						   "cbc-aes-caam",
2374 				.cra_blocksize = AES_BLOCK_SIZE,
2375 			},
2376 			.setkey = aead_setkey,
2377 			.setauthsize = aead_setauthsize,
2378 			.encrypt = aead_encrypt,
2379 			.decrypt = aead_decrypt,
2380 			.ivsize = AES_BLOCK_SIZE,
2381 			.maxauthsize = SHA512_DIGEST_SIZE,
2382 		},
2383 		.caam = {
2384 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2385 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2386 					   OP_ALG_AAI_HMAC_PRECOMP,
2387 		},
2388 	},
2389 	{
2390 		.aead = {
2391 			.base = {
2392 				.cra_name = "echainiv(authenc(hmac(sha512),"
2393 					    "cbc(aes)))",
2394 				.cra_driver_name = "echainiv-authenc-"
2395 						   "hmac-sha512-cbc-aes-caam",
2396 				.cra_blocksize = AES_BLOCK_SIZE,
2397 			},
2398 			.setkey = aead_setkey,
2399 			.setauthsize = aead_setauthsize,
2400 			.encrypt = aead_encrypt,
2401 			.decrypt = aead_decrypt,
2402 			.ivsize = AES_BLOCK_SIZE,
2403 			.maxauthsize = SHA512_DIGEST_SIZE,
2404 		},
2405 		.caam = {
2406 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2407 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2408 					   OP_ALG_AAI_HMAC_PRECOMP,
2409 			.geniv = true,
2410 		},
2411 	},
2412 	{
2413 		.aead = {
2414 			.base = {
2415 				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2416 				.cra_driver_name = "authenc-hmac-md5-"
2417 						   "cbc-des3_ede-caam",
2418 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2419 			},
2420 			.setkey = aead_setkey,
2421 			.setauthsize = aead_setauthsize,
2422 			.encrypt = aead_encrypt,
2423 			.decrypt = aead_decrypt,
2424 			.ivsize = DES3_EDE_BLOCK_SIZE,
2425 			.maxauthsize = MD5_DIGEST_SIZE,
2426 		},
2427 		.caam = {
2428 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2429 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2430 					   OP_ALG_AAI_HMAC_PRECOMP,
2431 		}
2432 	},
2433 	{
2434 		.aead = {
2435 			.base = {
2436 				.cra_name = "echainiv(authenc(hmac(md5),"
2437 					    "cbc(des3_ede)))",
2438 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2439 						   "cbc-des3_ede-caam",
2440 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2441 			},
2442 			.setkey = aead_setkey,
2443 			.setauthsize = aead_setauthsize,
2444 			.encrypt = aead_encrypt,
2445 			.decrypt = aead_decrypt,
2446 			.ivsize = DES3_EDE_BLOCK_SIZE,
2447 			.maxauthsize = MD5_DIGEST_SIZE,
2448 		},
2449 		.caam = {
2450 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2451 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2452 					   OP_ALG_AAI_HMAC_PRECOMP,
2453 			.geniv = true,
2454 		}
2455 	},
2456 	{
2457 		.aead = {
2458 			.base = {
2459 				.cra_name = "authenc(hmac(sha1),"
2460 					    "cbc(des3_ede))",
2461 				.cra_driver_name = "authenc-hmac-sha1-"
2462 						   "cbc-des3_ede-caam",
2463 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2464 			},
2465 			.setkey = aead_setkey,
2466 			.setauthsize = aead_setauthsize,
2467 			.encrypt = aead_encrypt,
2468 			.decrypt = aead_decrypt,
2469 			.ivsize = DES3_EDE_BLOCK_SIZE,
2470 			.maxauthsize = SHA1_DIGEST_SIZE,
2471 		},
2472 		.caam = {
2473 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2474 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2475 					   OP_ALG_AAI_HMAC_PRECOMP,
2476 		},
2477 	},
2478 	{
2479 		.aead = {
2480 			.base = {
2481 				.cra_name = "echainiv(authenc(hmac(sha1),"
2482 					    "cbc(des3_ede)))",
2483 				.cra_driver_name = "echainiv-authenc-"
2484 						   "hmac-sha1-"
2485 						   "cbc-des3_ede-caam",
2486 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2487 			},
2488 			.setkey = aead_setkey,
2489 			.setauthsize = aead_setauthsize,
2490 			.encrypt = aead_encrypt,
2491 			.decrypt = aead_decrypt,
2492 			.ivsize = DES3_EDE_BLOCK_SIZE,
2493 			.maxauthsize = SHA1_DIGEST_SIZE,
2494 		},
2495 		.caam = {
2496 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2497 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2498 					   OP_ALG_AAI_HMAC_PRECOMP,
2499 			.geniv = true,
2500 		},
2501 	},
2502 	{
2503 		.aead = {
2504 			.base = {
2505 				.cra_name = "authenc(hmac(sha224),"
2506 					    "cbc(des3_ede))",
2507 				.cra_driver_name = "authenc-hmac-sha224-"
2508 						   "cbc-des3_ede-caam",
2509 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2510 			},
2511 			.setkey = aead_setkey,
2512 			.setauthsize = aead_setauthsize,
2513 			.encrypt = aead_encrypt,
2514 			.decrypt = aead_decrypt,
2515 			.ivsize = DES3_EDE_BLOCK_SIZE,
2516 			.maxauthsize = SHA224_DIGEST_SIZE,
2517 		},
2518 		.caam = {
2519 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2520 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2521 					   OP_ALG_AAI_HMAC_PRECOMP,
2522 		},
2523 	},
2524 	{
2525 		.aead = {
2526 			.base = {
2527 				.cra_name = "echainiv(authenc(hmac(sha224),"
2528 					    "cbc(des3_ede)))",
2529 				.cra_driver_name = "echainiv-authenc-"
2530 						   "hmac-sha224-"
2531 						   "cbc-des3_ede-caam",
2532 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2533 			},
2534 			.setkey = aead_setkey,
2535 			.setauthsize = aead_setauthsize,
2536 			.encrypt = aead_encrypt,
2537 			.decrypt = aead_decrypt,
2538 			.ivsize = DES3_EDE_BLOCK_SIZE,
2539 			.maxauthsize = SHA224_DIGEST_SIZE,
2540 		},
2541 		.caam = {
2542 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2543 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2544 					   OP_ALG_AAI_HMAC_PRECOMP,
2545 			.geniv = true,
2546 		},
2547 	},
2548 	{
2549 		.aead = {
2550 			.base = {
2551 				.cra_name = "authenc(hmac(sha256),"
2552 					    "cbc(des3_ede))",
2553 				.cra_driver_name = "authenc-hmac-sha256-"
2554 						   "cbc-des3_ede-caam",
2555 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2556 			},
2557 			.setkey = aead_setkey,
2558 			.setauthsize = aead_setauthsize,
2559 			.encrypt = aead_encrypt,
2560 			.decrypt = aead_decrypt,
2561 			.ivsize = DES3_EDE_BLOCK_SIZE,
2562 			.maxauthsize = SHA256_DIGEST_SIZE,
2563 		},
2564 		.caam = {
2565 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2566 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2567 					   OP_ALG_AAI_HMAC_PRECOMP,
2568 		},
2569 	},
2570 	{
2571 		.aead = {
2572 			.base = {
2573 				.cra_name = "echainiv(authenc(hmac(sha256),"
2574 					    "cbc(des3_ede)))",
2575 				.cra_driver_name = "echainiv-authenc-"
2576 						   "hmac-sha256-"
2577 						   "cbc-des3_ede-caam",
2578 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2579 			},
2580 			.setkey = aead_setkey,
2581 			.setauthsize = aead_setauthsize,
2582 			.encrypt = aead_encrypt,
2583 			.decrypt = aead_decrypt,
2584 			.ivsize = DES3_EDE_BLOCK_SIZE,
2585 			.maxauthsize = SHA256_DIGEST_SIZE,
2586 		},
2587 		.caam = {
2588 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2589 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2590 					   OP_ALG_AAI_HMAC_PRECOMP,
2591 			.geniv = true,
2592 		},
2593 	},
2594 	{
2595 		.aead = {
2596 			.base = {
2597 				.cra_name = "authenc(hmac(sha384),"
2598 					    "cbc(des3_ede))",
2599 				.cra_driver_name = "authenc-hmac-sha384-"
2600 						   "cbc-des3_ede-caam",
2601 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2602 			},
2603 			.setkey = aead_setkey,
2604 			.setauthsize = aead_setauthsize,
2605 			.encrypt = aead_encrypt,
2606 			.decrypt = aead_decrypt,
2607 			.ivsize = DES3_EDE_BLOCK_SIZE,
2608 			.maxauthsize = SHA384_DIGEST_SIZE,
2609 		},
2610 		.caam = {
2611 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2612 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2613 					   OP_ALG_AAI_HMAC_PRECOMP,
2614 		},
2615 	},
2616 	{
2617 		.aead = {
2618 			.base = {
2619 				.cra_name = "echainiv(authenc(hmac(sha384),"
2620 					    "cbc(des3_ede)))",
2621 				.cra_driver_name = "echainiv-authenc-"
2622 						   "hmac-sha384-"
2623 						   "cbc-des3_ede-caam",
2624 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2625 			},
2626 			.setkey = aead_setkey,
2627 			.setauthsize = aead_setauthsize,
2628 			.encrypt = aead_encrypt,
2629 			.decrypt = aead_decrypt,
2630 			.ivsize = DES3_EDE_BLOCK_SIZE,
2631 			.maxauthsize = SHA384_DIGEST_SIZE,
2632 		},
2633 		.caam = {
2634 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2635 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2636 					   OP_ALG_AAI_HMAC_PRECOMP,
2637 			.geniv = true,
2638 		},
2639 	},
2640 	{
2641 		.aead = {
2642 			.base = {
2643 				.cra_name = "authenc(hmac(sha512),"
2644 					    "cbc(des3_ede))",
2645 				.cra_driver_name = "authenc-hmac-sha512-"
2646 						   "cbc-des3_ede-caam",
2647 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2648 			},
2649 			.setkey = aead_setkey,
2650 			.setauthsize = aead_setauthsize,
2651 			.encrypt = aead_encrypt,
2652 			.decrypt = aead_decrypt,
2653 			.ivsize = DES3_EDE_BLOCK_SIZE,
2654 			.maxauthsize = SHA512_DIGEST_SIZE,
2655 		},
2656 		.caam = {
2657 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2658 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2659 					   OP_ALG_AAI_HMAC_PRECOMP,
2660 		},
2661 	},
2662 	{
2663 		.aead = {
2664 			.base = {
2665 				.cra_name = "echainiv(authenc(hmac(sha512),"
2666 					    "cbc(des3_ede)))",
2667 				.cra_driver_name = "echainiv-authenc-"
2668 						   "hmac-sha512-"
2669 						   "cbc-des3_ede-caam",
2670 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2671 			},
2672 			.setkey = aead_setkey,
2673 			.setauthsize = aead_setauthsize,
2674 			.encrypt = aead_encrypt,
2675 			.decrypt = aead_decrypt,
2676 			.ivsize = DES3_EDE_BLOCK_SIZE,
2677 			.maxauthsize = SHA512_DIGEST_SIZE,
2678 		},
2679 		.caam = {
2680 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2681 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2682 					   OP_ALG_AAI_HMAC_PRECOMP,
2683 			.geniv = true,
2684 		},
2685 	},
2686 	{
2687 		.aead = {
2688 			.base = {
2689 				.cra_name = "authenc(hmac(md5),cbc(des))",
2690 				.cra_driver_name = "authenc-hmac-md5-"
2691 						   "cbc-des-caam",
2692 				.cra_blocksize = DES_BLOCK_SIZE,
2693 			},
2694 			.setkey = aead_setkey,
2695 			.setauthsize = aead_setauthsize,
2696 			.encrypt = aead_encrypt,
2697 			.decrypt = aead_decrypt,
2698 			.ivsize = DES_BLOCK_SIZE,
2699 			.maxauthsize = MD5_DIGEST_SIZE,
2700 		},
2701 		.caam = {
2702 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2703 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2704 					   OP_ALG_AAI_HMAC_PRECOMP,
2705 		},
2706 	},
2707 	{
2708 		.aead = {
2709 			.base = {
2710 				.cra_name = "echainiv(authenc(hmac(md5),"
2711 					    "cbc(des)))",
2712 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2713 						   "cbc-des-caam",
2714 				.cra_blocksize = DES_BLOCK_SIZE,
2715 			},
2716 			.setkey = aead_setkey,
2717 			.setauthsize = aead_setauthsize,
2718 			.encrypt = aead_encrypt,
2719 			.decrypt = aead_decrypt,
2720 			.ivsize = DES_BLOCK_SIZE,
2721 			.maxauthsize = MD5_DIGEST_SIZE,
2722 		},
2723 		.caam = {
2724 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2725 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2726 					   OP_ALG_AAI_HMAC_PRECOMP,
2727 			.geniv = true,
2728 		},
2729 	},
2730 	{
2731 		.aead = {
2732 			.base = {
2733 				.cra_name = "authenc(hmac(sha1),cbc(des))",
2734 				.cra_driver_name = "authenc-hmac-sha1-"
2735 						   "cbc-des-caam",
2736 				.cra_blocksize = DES_BLOCK_SIZE,
2737 			},
2738 			.setkey = aead_setkey,
2739 			.setauthsize = aead_setauthsize,
2740 			.encrypt = aead_encrypt,
2741 			.decrypt = aead_decrypt,
2742 			.ivsize = DES_BLOCK_SIZE,
2743 			.maxauthsize = SHA1_DIGEST_SIZE,
2744 		},
2745 		.caam = {
2746 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2747 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2748 					   OP_ALG_AAI_HMAC_PRECOMP,
2749 		},
2750 	},
2751 	{
2752 		.aead = {
2753 			.base = {
2754 				.cra_name = "echainiv(authenc(hmac(sha1),"
2755 					    "cbc(des)))",
2756 				.cra_driver_name = "echainiv-authenc-"
2757 						   "hmac-sha1-cbc-des-caam",
2758 				.cra_blocksize = DES_BLOCK_SIZE,
2759 			},
2760 			.setkey = aead_setkey,
2761 			.setauthsize = aead_setauthsize,
2762 			.encrypt = aead_encrypt,
2763 			.decrypt = aead_decrypt,
2764 			.ivsize = DES_BLOCK_SIZE,
2765 			.maxauthsize = SHA1_DIGEST_SIZE,
2766 		},
2767 		.caam = {
2768 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2769 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2770 					   OP_ALG_AAI_HMAC_PRECOMP,
2771 			.geniv = true,
2772 		},
2773 	},
2774 	{
2775 		.aead = {
2776 			.base = {
2777 				.cra_name = "authenc(hmac(sha224),cbc(des))",
2778 				.cra_driver_name = "authenc-hmac-sha224-"
2779 						   "cbc-des-caam",
2780 				.cra_blocksize = DES_BLOCK_SIZE,
2781 			},
2782 			.setkey = aead_setkey,
2783 			.setauthsize = aead_setauthsize,
2784 			.encrypt = aead_encrypt,
2785 			.decrypt = aead_decrypt,
2786 			.ivsize = DES_BLOCK_SIZE,
2787 			.maxauthsize = SHA224_DIGEST_SIZE,
2788 		},
2789 		.caam = {
2790 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2791 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2792 					   OP_ALG_AAI_HMAC_PRECOMP,
2793 		},
2794 	},
2795 	{
2796 		.aead = {
2797 			.base = {
2798 				.cra_name = "echainiv(authenc(hmac(sha224),"
2799 					    "cbc(des)))",
2800 				.cra_driver_name = "echainiv-authenc-"
2801 						   "hmac-sha224-cbc-des-caam",
2802 				.cra_blocksize = DES_BLOCK_SIZE,
2803 			},
2804 			.setkey = aead_setkey,
2805 			.setauthsize = aead_setauthsize,
2806 			.encrypt = aead_encrypt,
2807 			.decrypt = aead_decrypt,
2808 			.ivsize = DES_BLOCK_SIZE,
2809 			.maxauthsize = SHA224_DIGEST_SIZE,
2810 		},
2811 		.caam = {
2812 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2813 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2814 					   OP_ALG_AAI_HMAC_PRECOMP,
2815 			.geniv = true,
2816 		},
2817 	},
2818 	{
2819 		.aead = {
2820 			.base = {
2821 				.cra_name = "authenc(hmac(sha256),cbc(des))",
2822 				.cra_driver_name = "authenc-hmac-sha256-"
2823 						   "cbc-des-caam",
2824 				.cra_blocksize = DES_BLOCK_SIZE,
2825 			},
2826 			.setkey = aead_setkey,
2827 			.setauthsize = aead_setauthsize,
2828 			.encrypt = aead_encrypt,
2829 			.decrypt = aead_decrypt,
2830 			.ivsize = DES_BLOCK_SIZE,
2831 			.maxauthsize = SHA256_DIGEST_SIZE,
2832 		},
2833 		.caam = {
2834 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2835 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2836 					   OP_ALG_AAI_HMAC_PRECOMP,
2837 		},
2838 	},
2839 	{
2840 		.aead = {
2841 			.base = {
2842 				.cra_name = "echainiv(authenc(hmac(sha256),"
2843 					    "cbc(des)))",
2844 				.cra_driver_name = "echainiv-authenc-"
2845 						   "hmac-sha256-cbc-des-caam",
2846 				.cra_blocksize = DES_BLOCK_SIZE,
2847 			},
2848 			.setkey = aead_setkey,
2849 			.setauthsize = aead_setauthsize,
2850 			.encrypt = aead_encrypt,
2851 			.decrypt = aead_decrypt,
2852 			.ivsize = DES_BLOCK_SIZE,
2853 			.maxauthsize = SHA256_DIGEST_SIZE,
2854 		},
2855 		.caam = {
2856 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2857 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2858 					   OP_ALG_AAI_HMAC_PRECOMP,
2859 			.geniv = true,
2860 		},
2861 	},
2862 	{
2863 		.aead = {
2864 			.base = {
2865 				.cra_name = "authenc(hmac(sha384),cbc(des))",
2866 				.cra_driver_name = "authenc-hmac-sha384-"
2867 						   "cbc-des-caam",
2868 				.cra_blocksize = DES_BLOCK_SIZE,
2869 			},
2870 			.setkey = aead_setkey,
2871 			.setauthsize = aead_setauthsize,
2872 			.encrypt = aead_encrypt,
2873 			.decrypt = aead_decrypt,
2874 			.ivsize = DES_BLOCK_SIZE,
2875 			.maxauthsize = SHA384_DIGEST_SIZE,
2876 		},
2877 		.caam = {
2878 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2879 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2880 					   OP_ALG_AAI_HMAC_PRECOMP,
2881 		},
2882 	},
2883 	{
2884 		.aead = {
2885 			.base = {
2886 				.cra_name = "echainiv(authenc(hmac(sha384),"
2887 					    "cbc(des)))",
2888 				.cra_driver_name = "echainiv-authenc-"
2889 						   "hmac-sha384-cbc-des-caam",
2890 				.cra_blocksize = DES_BLOCK_SIZE,
2891 			},
2892 			.setkey = aead_setkey,
2893 			.setauthsize = aead_setauthsize,
2894 			.encrypt = aead_encrypt,
2895 			.decrypt = aead_decrypt,
2896 			.ivsize = DES_BLOCK_SIZE,
2897 			.maxauthsize = SHA384_DIGEST_SIZE,
2898 		},
2899 		.caam = {
2900 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2901 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2902 					   OP_ALG_AAI_HMAC_PRECOMP,
2903 			.geniv = true,
2904 		},
2905 	},
2906 	{
2907 		.aead = {
2908 			.base = {
2909 				.cra_name = "authenc(hmac(sha512),cbc(des))",
2910 				.cra_driver_name = "authenc-hmac-sha512-"
2911 						   "cbc-des-caam",
2912 				.cra_blocksize = DES_BLOCK_SIZE,
2913 			},
2914 			.setkey = aead_setkey,
2915 			.setauthsize = aead_setauthsize,
2916 			.encrypt = aead_encrypt,
2917 			.decrypt = aead_decrypt,
2918 			.ivsize = DES_BLOCK_SIZE,
2919 			.maxauthsize = SHA512_DIGEST_SIZE,
2920 		},
2921 		.caam = {
2922 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2923 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2924 					   OP_ALG_AAI_HMAC_PRECOMP,
2925 		},
2926 	},
2927 	{
2928 		.aead = {
2929 			.base = {
2930 				.cra_name = "echainiv(authenc(hmac(sha512),"
2931 					    "cbc(des)))",
2932 				.cra_driver_name = "echainiv-authenc-"
2933 						   "hmac-sha512-cbc-des-caam",
2934 				.cra_blocksize = DES_BLOCK_SIZE,
2935 			},
2936 			.setkey = aead_setkey,
2937 			.setauthsize = aead_setauthsize,
2938 			.encrypt = aead_encrypt,
2939 			.decrypt = aead_decrypt,
2940 			.ivsize = DES_BLOCK_SIZE,
2941 			.maxauthsize = SHA512_DIGEST_SIZE,
2942 		},
2943 		.caam = {
2944 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2945 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2946 					   OP_ALG_AAI_HMAC_PRECOMP,
2947 			.geniv = true,
2948 		},
2949 	},
2950 	{
2951 		.aead = {
2952 			.base = {
2953 				.cra_name = "authenc(hmac(md5),"
2954 					    "rfc3686(ctr(aes)))",
2955 				.cra_driver_name = "authenc-hmac-md5-"
2956 						   "rfc3686-ctr-aes-caam",
2957 				.cra_blocksize = 1,
2958 			},
2959 			.setkey = aead_setkey,
2960 			.setauthsize = aead_setauthsize,
2961 			.encrypt = aead_encrypt,
2962 			.decrypt = aead_decrypt,
2963 			.ivsize = CTR_RFC3686_IV_SIZE,
2964 			.maxauthsize = MD5_DIGEST_SIZE,
2965 		},
2966 		.caam = {
2967 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2968 					   OP_ALG_AAI_CTR_MOD128,
2969 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2970 					   OP_ALG_AAI_HMAC_PRECOMP,
2971 			.rfc3686 = true,
2972 		},
2973 	},
2974 	{
2975 		.aead = {
2976 			.base = {
2977 				.cra_name = "seqiv(authenc("
2978 					    "hmac(md5),rfc3686(ctr(aes))))",
2979 				.cra_driver_name = "seqiv-authenc-hmac-md5-"
2980 						   "rfc3686-ctr-aes-caam",
2981 				.cra_blocksize = 1,
2982 			},
2983 			.setkey = aead_setkey,
2984 			.setauthsize = aead_setauthsize,
2985 			.encrypt = aead_encrypt,
2986 			.decrypt = aead_decrypt,
2987 			.ivsize = CTR_RFC3686_IV_SIZE,
2988 			.maxauthsize = MD5_DIGEST_SIZE,
2989 		},
2990 		.caam = {
2991 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2992 					   OP_ALG_AAI_CTR_MOD128,
2993 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2994 					   OP_ALG_AAI_HMAC_PRECOMP,
2995 			.rfc3686 = true,
2996 			.geniv = true,
2997 		},
2998 	},
2999 	{
3000 		.aead = {
3001 			.base = {
3002 				.cra_name = "authenc(hmac(sha1),"
3003 					    "rfc3686(ctr(aes)))",
3004 				.cra_driver_name = "authenc-hmac-sha1-"
3005 						   "rfc3686-ctr-aes-caam",
3006 				.cra_blocksize = 1,
3007 			},
3008 			.setkey = aead_setkey,
3009 			.setauthsize = aead_setauthsize,
3010 			.encrypt = aead_encrypt,
3011 			.decrypt = aead_decrypt,
3012 			.ivsize = CTR_RFC3686_IV_SIZE,
3013 			.maxauthsize = SHA1_DIGEST_SIZE,
3014 		},
3015 		.caam = {
3016 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3017 					   OP_ALG_AAI_CTR_MOD128,
3018 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3019 					   OP_ALG_AAI_HMAC_PRECOMP,
3020 			.rfc3686 = true,
3021 		},
3022 	},
3023 	{
3024 		.aead = {
3025 			.base = {
3026 				.cra_name = "seqiv(authenc("
3027 					    "hmac(sha1),rfc3686(ctr(aes))))",
3028 				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
3029 						   "rfc3686-ctr-aes-caam",
3030 				.cra_blocksize = 1,
3031 			},
3032 			.setkey = aead_setkey,
3033 			.setauthsize = aead_setauthsize,
3034 			.encrypt = aead_encrypt,
3035 			.decrypt = aead_decrypt,
3036 			.ivsize = CTR_RFC3686_IV_SIZE,
3037 			.maxauthsize = SHA1_DIGEST_SIZE,
3038 		},
3039 		.caam = {
3040 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3041 					   OP_ALG_AAI_CTR_MOD128,
3042 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3043 					   OP_ALG_AAI_HMAC_PRECOMP,
3044 			.rfc3686 = true,
3045 			.geniv = true,
3046 		},
3047 	},
3048 	{
3049 		.aead = {
3050 			.base = {
3051 				.cra_name = "authenc(hmac(sha224),"
3052 					    "rfc3686(ctr(aes)))",
3053 				.cra_driver_name = "authenc-hmac-sha224-"
3054 						   "rfc3686-ctr-aes-caam",
3055 				.cra_blocksize = 1,
3056 			},
3057 			.setkey = aead_setkey,
3058 			.setauthsize = aead_setauthsize,
3059 			.encrypt = aead_encrypt,
3060 			.decrypt = aead_decrypt,
3061 			.ivsize = CTR_RFC3686_IV_SIZE,
3062 			.maxauthsize = SHA224_DIGEST_SIZE,
3063 		},
3064 		.caam = {
3065 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3066 					   OP_ALG_AAI_CTR_MOD128,
3067 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3068 					   OP_ALG_AAI_HMAC_PRECOMP,
3069 			.rfc3686 = true,
3070 		},
3071 	},
3072 	{
3073 		.aead = {
3074 			.base = {
3075 				.cra_name = "seqiv(authenc("
3076 					    "hmac(sha224),rfc3686(ctr(aes))))",
3077 				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
3078 						   "rfc3686-ctr-aes-caam",
3079 				.cra_blocksize = 1,
3080 			},
3081 			.setkey = aead_setkey,
3082 			.setauthsize = aead_setauthsize,
3083 			.encrypt = aead_encrypt,
3084 			.decrypt = aead_decrypt,
3085 			.ivsize = CTR_RFC3686_IV_SIZE,
3086 			.maxauthsize = SHA224_DIGEST_SIZE,
3087 		},
3088 		.caam = {
3089 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3090 					   OP_ALG_AAI_CTR_MOD128,
3091 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3092 					   OP_ALG_AAI_HMAC_PRECOMP,
3093 			.rfc3686 = true,
3094 			.geniv = true,
3095 		},
3096 	},
3097 	{
3098 		.aead = {
3099 			.base = {
3100 				.cra_name = "authenc(hmac(sha256),"
3101 					    "rfc3686(ctr(aes)))",
3102 				.cra_driver_name = "authenc-hmac-sha256-"
3103 						   "rfc3686-ctr-aes-caam",
3104 				.cra_blocksize = 1,
3105 			},
3106 			.setkey = aead_setkey,
3107 			.setauthsize = aead_setauthsize,
3108 			.encrypt = aead_encrypt,
3109 			.decrypt = aead_decrypt,
3110 			.ivsize = CTR_RFC3686_IV_SIZE,
3111 			.maxauthsize = SHA256_DIGEST_SIZE,
3112 		},
3113 		.caam = {
3114 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3115 					   OP_ALG_AAI_CTR_MOD128,
3116 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3117 					   OP_ALG_AAI_HMAC_PRECOMP,
3118 			.rfc3686 = true,
3119 		},
3120 	},
3121 	{
3122 		.aead = {
3123 			.base = {
3124 				.cra_name = "seqiv(authenc(hmac(sha256),"
3125 					    "rfc3686(ctr(aes))))",
3126 				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
3127 						   "rfc3686-ctr-aes-caam",
3128 				.cra_blocksize = 1,
3129 			},
3130 			.setkey = aead_setkey,
3131 			.setauthsize = aead_setauthsize,
3132 			.encrypt = aead_encrypt,
3133 			.decrypt = aead_decrypt,
3134 			.ivsize = CTR_RFC3686_IV_SIZE,
3135 			.maxauthsize = SHA256_DIGEST_SIZE,
3136 		},
3137 		.caam = {
3138 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3139 					   OP_ALG_AAI_CTR_MOD128,
3140 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3141 					   OP_ALG_AAI_HMAC_PRECOMP,
3142 			.rfc3686 = true,
3143 			.geniv = true,
3144 		},
3145 	},
3146 	{
3147 		.aead = {
3148 			.base = {
3149 				.cra_name = "authenc(hmac(sha384),"
3150 					    "rfc3686(ctr(aes)))",
3151 				.cra_driver_name = "authenc-hmac-sha384-"
3152 						   "rfc3686-ctr-aes-caam",
3153 				.cra_blocksize = 1,
3154 			},
3155 			.setkey = aead_setkey,
3156 			.setauthsize = aead_setauthsize,
3157 			.encrypt = aead_encrypt,
3158 			.decrypt = aead_decrypt,
3159 			.ivsize = CTR_RFC3686_IV_SIZE,
3160 			.maxauthsize = SHA384_DIGEST_SIZE,
3161 		},
3162 		.caam = {
3163 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3164 					   OP_ALG_AAI_CTR_MOD128,
3165 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3166 					   OP_ALG_AAI_HMAC_PRECOMP,
3167 			.rfc3686 = true,
3168 		},
3169 	},
3170 	{
3171 		.aead = {
3172 			.base = {
3173 				.cra_name = "seqiv(authenc(hmac(sha384),"
3174 					    "rfc3686(ctr(aes))))",
3175 				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
3176 						   "rfc3686-ctr-aes-caam",
3177 				.cra_blocksize = 1,
3178 			},
3179 			.setkey = aead_setkey,
3180 			.setauthsize = aead_setauthsize,
3181 			.encrypt = aead_encrypt,
3182 			.decrypt = aead_decrypt,
3183 			.ivsize = CTR_RFC3686_IV_SIZE,
3184 			.maxauthsize = SHA384_DIGEST_SIZE,
3185 		},
3186 		.caam = {
3187 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3188 					   OP_ALG_AAI_CTR_MOD128,
3189 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3190 					   OP_ALG_AAI_HMAC_PRECOMP,
3191 			.rfc3686 = true,
3192 			.geniv = true,
3193 		},
3194 	},
3195 	{
3196 		.aead = {
3197 			.base = {
3198 				.cra_name = "authenc(hmac(sha512),"
3199 					    "rfc3686(ctr(aes)))",
3200 				.cra_driver_name = "authenc-hmac-sha512-"
3201 						   "rfc3686-ctr-aes-caam",
3202 				.cra_blocksize = 1,
3203 			},
3204 			.setkey = aead_setkey,
3205 			.setauthsize = aead_setauthsize,
3206 			.encrypt = aead_encrypt,
3207 			.decrypt = aead_decrypt,
3208 			.ivsize = CTR_RFC3686_IV_SIZE,
3209 			.maxauthsize = SHA512_DIGEST_SIZE,
3210 		},
3211 		.caam = {
3212 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3213 					   OP_ALG_AAI_CTR_MOD128,
3214 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3215 					   OP_ALG_AAI_HMAC_PRECOMP,
3216 			.rfc3686 = true,
3217 		},
3218 	},
3219 	{
3220 		.aead = {
3221 			.base = {
3222 				.cra_name = "seqiv(authenc(hmac(sha512),"
3223 					    "rfc3686(ctr(aes))))",
3224 				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
3225 						   "rfc3686-ctr-aes-caam",
3226 				.cra_blocksize = 1,
3227 			},
3228 			.setkey = aead_setkey,
3229 			.setauthsize = aead_setauthsize,
3230 			.encrypt = aead_encrypt,
3231 			.decrypt = aead_decrypt,
3232 			.ivsize = CTR_RFC3686_IV_SIZE,
3233 			.maxauthsize = SHA512_DIGEST_SIZE,
3234 		},
3235 		.caam = {
3236 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3237 					   OP_ALG_AAI_CTR_MOD128,
3238 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3239 					   OP_ALG_AAI_HMAC_PRECOMP,
3240 			.rfc3686 = true,
3241 			.geniv = true,
3242 		},
3243 	},
3244 };
3245 
3246 struct caam_crypto_alg {
3247 	struct crypto_alg crypto_alg;
3248 	struct list_head entry;
3249 	struct caam_alg_entry caam;
3250 };
3251 
3252 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
3253 {
3254 	dma_addr_t dma_addr;
3255 
3256 	ctx->jrdev = caam_jr_alloc();
3257 	if (IS_ERR(ctx->jrdev)) {
3258 		pr_err("Job Ring Device allocation for transform failed\n");
3259 		return PTR_ERR(ctx->jrdev);
3260 	}
3261 
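	/*
	 * Map the sh_desc_enc..key region of the context with one DMA
	 * mapping and derive the per-descriptor and key handles from it
	 * via offsetof() below. DMA_ATTR_SKIP_CPU_SYNC is used so that the
	 * shared descriptors can be synced explicitly when they are
	 * (re)written, e.g. in the setkey paths.
	 */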
3262 	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3263 					offsetof(struct caam_ctx,
3264 						 sh_desc_enc_dma),
3265 					DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
3266 	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3267 		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3268 		caam_jr_free(ctx->jrdev);
3269 		return -ENOMEM;
3270 	}
3271 
3272 	ctx->sh_desc_enc_dma = dma_addr;
3273 	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3274 						   sh_desc_dec);
3275 	ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
3276 						      sh_desc_givenc);
3277 	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3278 
3279 	/* copy descriptor header template value */
3280 	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3281 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3282 
3283 	return 0;
3284 }
3285 
3286 static int caam_cra_init(struct crypto_tfm *tfm)
3287 {
3288 	struct crypto_alg *alg = tfm->__crt_alg;
3289 	struct caam_crypto_alg *caam_alg =
3290 		 container_of(alg, struct caam_crypto_alg, crypto_alg);
3291 	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
3292 
3293 	return caam_init_common(ctx, &caam_alg->caam);
3294 }
3295 
3296 static int caam_aead_init(struct crypto_aead *tfm)
3297 {
3298 	struct aead_alg *alg = crypto_aead_alg(tfm);
3299 	struct caam_aead_alg *caam_alg =
3300 		 container_of(alg, struct caam_aead_alg, aead);
3301 	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3302 
3303 	return caam_init_common(ctx, &caam_alg->caam);
3304 }
3305 
3306 static void caam_exit_common(struct caam_ctx *ctx)
3307 {
3308 	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3309 			       offsetof(struct caam_ctx, sh_desc_enc_dma),
3310 			       DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
3311 	caam_jr_free(ctx->jrdev);
3312 }
3313 
3314 static void caam_cra_exit(struct crypto_tfm *tfm)
3315 {
3316 	caam_exit_common(crypto_tfm_ctx(tfm));
3317 }
3318 
3319 static void caam_aead_exit(struct crypto_aead *tfm)
3320 {
3321 	caam_exit_common(crypto_aead_ctx(tfm));
3322 }
3323 
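/*
 * Unregister everything caam_algapi_init() registered: AEADs first (only
 * those that made it, tracked via ->registered), then the legacy
 * crypto_alg entries kept on alg_list.
 */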
3324 static void __exit caam_algapi_exit(void)
3325 {
3327 	struct caam_crypto_alg *t_alg, *n;
3328 	int i;
3329 
3330 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3331 		struct caam_aead_alg *t_alg = driver_aeads + i;
3332 
3333 		if (t_alg->registered)
3334 			crypto_unregister_aead(&t_alg->aead);
3335 	}
3336 
3337 	if (!alg_list.next)
3338 		return;
3339 
3340 	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
3341 		crypto_unregister_alg(&t_alg->crypto_alg);
3342 		list_del(&t_alg->entry);
3343 		kfree(t_alg);
3344 	}
3345 }
3346 
3347 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
3348 					      *template)
3349 {
3350 	struct caam_crypto_alg *t_alg;
3351 	struct crypto_alg *alg;
3352 
3353 	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
3354 	if (!t_alg) {
3355 		pr_err("failed to allocate t_alg\n");
3356 		return ERR_PTR(-ENOMEM);
3357 	}
3358 
3359 	alg = &t_alg->crypto_alg;
3360 
3361 	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
3362 	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
3363 		 template->driver_name);
3364 	alg->cra_module = THIS_MODULE;
3365 	alg->cra_init = caam_cra_init;
3366 	alg->cra_exit = caam_cra_exit;
3367 	alg->cra_priority = CAAM_CRA_PRIORITY;
3368 	alg->cra_blocksize = template->blocksize;
3369 	alg->cra_alignmask = 0;
3370 	alg->cra_ctxsize = sizeof(struct caam_ctx);
3371 	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
3372 			 template->type;
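	/*
	 * GIVCIPHER templates provide a built-in IV generator through
	 * ->givencrypt; plain ABLKCIPHER templates leave IV generation to
	 * a generic geniv wrapper ("chainiv"/"eseqiv").
	 */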
3373 	switch (template->type) {
3374 	case CRYPTO_ALG_TYPE_GIVCIPHER:
3375 		alg->cra_type = &crypto_givcipher_type;
3376 		alg->cra_ablkcipher = template->template_ablkcipher;
3377 		break;
3378 	case CRYPTO_ALG_TYPE_ABLKCIPHER:
3379 		alg->cra_type = &crypto_ablkcipher_type;
3380 		alg->cra_ablkcipher = template->template_ablkcipher;
3381 		break;
3382 	}
3383 
3384 	t_alg->caam.class1_alg_type = template->class1_alg_type;
3385 	t_alg->caam.class2_alg_type = template->class2_alg_type;
3386 
3387 	return t_alg;
3388 }
3389 
3390 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3391 {
3392 	struct aead_alg *alg = &t_alg->aead;
3393 
3394 	alg->base.cra_module = THIS_MODULE;
3395 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3396 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3397 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3398 
3399 	alg->init = caam_aead_init;
3400 	alg->exit = caam_aead_exit;
3401 }
3402 
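/*
 * Once registered, these transforms are reached through the regular
 * crypto API; CAAM_CRA_PRIORITY (3000) makes them preferred over software
 * implementations. A minimal, hypothetical consumer sketch:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		... build and submit aead_request's; "gcm-aes-caam"
 *		... backs the tfm while this driver is loaded
 *		crypto_free_aead(tfm);
 *	}
 */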
3403 static int __init caam_algapi_init(void)
3404 {
3405 	struct device_node *dev_node;
3406 	struct platform_device *pdev;
3407 	struct device *ctrldev;
3408 	struct caam_drv_private *priv;
3409 	int i = 0, err = 0;
3410 	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
3411 	unsigned int md_limit = SHA512_DIGEST_SIZE;
3412 	bool registered = false;
3413 
3414 	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3415 	if (!dev_node) {
3416 		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3417 		if (!dev_node)
3418 			return -ENODEV;
3419 	}
3420 
3421 	pdev = of_find_device_by_node(dev_node);
3422 	if (!pdev) {
3423 		of_node_put(dev_node);
3424 		return -ENODEV;
3425 	}
3426 
3427 	ctrldev = &pdev->dev;
3428 	priv = dev_get_drvdata(ctrldev);
3429 	of_node_put(dev_node);
3430 
3431 	/*
3432 	 * If priv is NULL, it's probably because the caam driver wasn't
3433 	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3434 	 */
3435 	if (!priv)
3436 		return -ENODEV;
3437 
3439 	INIT_LIST_HEAD(&alg_list);
3440 
3441 	/*
3442 	 * Register crypto algorithms the device supports.
3443 	 * First, detect presence and attributes of DES, AES, and MD blocks.
3444 	 */
3445 	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3446 	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3447 	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
3448 	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
3449 	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3450 
3451 	/* If MD is present, limit digest size based on LP256 */
3452 	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
3453 		md_limit = SHA256_DIGEST_SIZE;
3454 
3455 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3456 		struct caam_crypto_alg *t_alg;
3457 		struct caam_alg_template *alg = driver_algs + i;
3458 		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;
3459 
3460 		/* Skip DES algorithms if not supported by device */
3461 		if (!des_inst &&
3462 		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3463 		     (alg_sel == OP_ALG_ALGSEL_DES)))
3464 			continue;
3465 
3466 		/* Skip AES algorithms if not supported by device */
3467 		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3468 			continue;
3469 
3470 		/*
3471 		 * Check support for AES modes not available
3472 		 * on LP devices.
3473 		 */
3474 		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3475 			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
3476 			     OP_ALG_AAI_XTS)
3477 				continue;
3478 
3479 		t_alg = caam_alg_alloc(alg);
3480 		if (IS_ERR(t_alg)) {
3481 			err = PTR_ERR(t_alg);
3482 			pr_warn("%s alg allocation failed\n", alg->driver_name);
3483 			continue;
3484 		}
3485 
3486 		err = crypto_register_alg(&t_alg->crypto_alg);
3487 		if (err) {
3488 			pr_warn("%s alg registration failed\n",
3489 				t_alg->crypto_alg.cra_driver_name);
3490 			kfree(t_alg);
3491 			continue;
3492 		}
3493 
3494 		list_add_tail(&t_alg->entry, &alg_list);
3495 		registered = true;
3496 	}
3497 
3498 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3499 		struct caam_aead_alg *t_alg = driver_aeads + i;
3500 		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3501 				 OP_ALG_ALGSEL_MASK;
3502 		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3503 				 OP_ALG_ALGSEL_MASK;
3504 		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3505 
3506 		/* Skip DES algorithms if not supported by device */
3507 		if (!des_inst &&
3508 		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3509 		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3510 			continue;
3511 
3512 		/* Skip AES algorithms if not supported by device */
3513 		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3514 			continue;
3515 
3516 		/*
3517 		 * Check support for AES algorithms not available
3518 		 * on LP devices.
3519 		 */
3520 		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3521 			if (alg_aai == OP_ALG_AAI_GCM)
3522 				continue;
3523 
3524 		/*
3525 		 * Skip algorithms requiring message digests
3526 		 * if MD or MD size is not supported by device.
3527 		 */
3528 		if (c2_alg_sel &&
3529 		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
3530 			continue;
3531 
3532 		caam_aead_alg_init(t_alg);
3533 
3534 		err = crypto_register_aead(&t_alg->aead);
3535 		if (err) {
3536 			pr_warn("%s alg registration failed\n",
3537 				t_alg->aead.base.cra_driver_name);
3538 			continue;
3539 		}
3540 
3541 		t_alg->registered = true;
3542 		registered = true;
3543 	}
3544 
3545 	if (registered)
3546 		pr_info("caam algorithms registered in /proc/crypto\n");
3547 
3548 	return err;
3549 }
3550 
3551 module_init(caam_algapi_init);
3552 module_exit(caam_algapi_exit);
3553 
3554 MODULE_LICENSE("GPL");
3555 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
3556 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
3557