/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */
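
/*
 * In this driver that split maps onto the flow below: .setkey() builds the
 * shared descriptor(s) once per key (the *_set_sh_desc() helpers), and each
 * crypto request then only builds a small job descriptor pointing at the
 * shared one and at its own data. Roughly (illustrative sketch only):
 *
 *	edesc = aead_edesc_alloc(req, ...);	   map src/dst, link tables
 *	init_aead_job(req, edesc, ...);		   header + seq in/out ptrs
 *	caam_jr_enqueue(jrdev, edesc->hw_desc,
 *			aead_encrypt_done, req);   hand off to a job ring
 *
 * Completion is reported asynchronously through the done callback.
 */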

#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/*
 * max key is sum of AES_MAX_KEY_SIZE, RFC3686 nonce size and max split
 * key size
 */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)
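
/*
 * ctx->key layout as produced by the setkey handlers (see aead_setkey()):
 *
 *	[ authentication split key, padded to keylen_pad | encryption key ]
 *
 * For RFC3686 the last CTR_RFC3686_NONCE_SIZE bytes of the encryption key
 * material are the nonce, which is why aead_set_sh_desc() derives the nonce
 * pointer from the end of cdata.keylen.
 */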

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif

#ifdef DEBUG
#include <linux/highmem.h>

static void dbg_dump_sg(const char *level, const char *prefix_str,
			int prefix_type, int rowsize, int groupsize,
			struct scatterlist *sg, size_t tlen, bool ascii)
{
	struct scatterlist *it;
	void *it_page;
	size_t len;
	void *buf;

	/* advance via sg_next(it), not sg_next(sg), so the walk progresses */
	for (it = sg; it != NULL && tlen > 0; it = sg_next(it)) {
		/*
		 * make sure the scatterlist's page
		 * has a valid virtual memory mapping
		 */
		it_page = kmap_atomic(sg_page(it));
		if (unlikely(!it_page)) {
			printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
			return;
		}

		buf = it_page + it->offset;
		len = min_t(size_t, tlen, it->length);
		print_hex_dump(level, prefix_str, prefix_type, rowsize,
			       groupsize, buf, len, ascii);
		tlen -= len;

		kunmap_atomic(it_page);
	}
}
#endif

static struct list_head alg_list;

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t sh_desc_givenc_dma;
	dma_addr_t key_dma;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};
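
/*
 * The shared descriptor buffers and the key buffer above are DMA-mapped
 * once when the tfm context is set up (outside this excerpt); the
 * *_set_sh_desc() and *_setkey() paths below only rewrite their contents
 * and push the changes out with dma_sync_single_for_device().
 */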

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

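	/*
	 * Bit i of inl_mask is set when data_len[i] still fits inline:
	 * bit 0 covers the (split) authentication key, bit 1 the cipher
	 * key. Inlined keys are referenced by virtual address, the rest
	 * through their DMA address.
	 */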
	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* append encryption key right after the auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, DMA_TO_DEVICE);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif
	ctx->cdata.keylen = keys.enckeylen;
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);
	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   DMA_TO_DEVICE);
	return rfc4543_set_sh_desc(aead);
}

static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
			     const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = (ctr_mode &&
				 (strstr(alg_name, "rfc3686") != NULL));

	memcpy(ctx->key, key, keylen);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				     ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* ablkcipher_givencrypt shared descriptor */
	desc = ctx->sh_desc_givenc;
	cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
					ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
				 const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(ablkcipher,
					    CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = ctx->key;
	ctx->cdata.key_inline = true;

	/* xts_ablkcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	/* xts_ablkcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), DMA_TO_DEVICE);

	return 0;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct ablkcipher_edesc {
	int src_nents;
	int dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
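
/*
 * Both extended descriptors are allocated as one contiguous block (see the
 * *_edesc_alloc() helpers below):
 *
 *	[ struct xxx_edesc | h/w job descriptor | sec4 link table entries ]
 *
 * which is why sec4_sg is derived from the edesc pointer plus the struct
 * and descriptor sizes rather than being allocated separately.
 */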

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void ablkcipher_unmap(struct device *dev,
			     struct ablkcipher_edesc *edesc,
			     struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/* verify that the h/w ICV (auth) check passed, else return -EBADMSG */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->info) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	scatterwalk_map_and_copy(req->info, req->dst, req->nbytes - ivsize,
				 ivsize, 0);

	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				    void *context)
{
	struct ablkcipher_request *req = context;
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		    edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
#endif

	ablkcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->info) to the last
	 * ciphertext block.
	 */
	scatterwalk_map_and_copy(req->info, req->src, req->nbytes - ivsize,
				 ivsize, 0);

	kfree(edesc);

	ablkcipher_request_complete(req, err);
}

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);

	/* REG3 = assoclen */
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
}

static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == 12);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
			 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128 bits (16 bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in ablkcipher job descriptor
 */
static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
				struct ablkcipher_edesc *edesc,
				struct ablkcipher_request *req,
				bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options = 0, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	pr_err("asked=%d, nbytes=%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
	dbg_dump_sg(KERN_ERR, "src    @"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (iv_contig) {
		src_dma = edesc->iv_dma;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents + 1;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		if (edesc->src_nents == 1 && iv_contig) {
			dst_dma = sg_dma_address(req->src);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	} else {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			dst_dma = edesc->sec4_sg_dma +
				sec4_sg_index * sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
}

/*
 * Fill in ablkcipher givencrypt job descriptor
 */
static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
				    struct ablkcipher_edesc *edesc,
				    struct ablkcipher_request *req,
				    bool iv_contig)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
		       ivsize, 1);
	dbg_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__) ": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    edesc->src_nents > 1 ? 100 : req->nbytes, 1);
#endif

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (edesc->src_nents == 1) {
		src_dma = sg_dma_address(req->src);
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}
	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);

	if (iv_contig) {
		dst_dma = edesc->iv_dma;
		out_options = 0;
	} else {
		dst_dma = edesc->sec4_sg_dma +
			  sec4_sg_index * sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}
	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}

static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}

static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}

static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

#ifdef DEBUG
	dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		    req->assoclen + req->cryptlen, 1);
#endif

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map the ablkcipher extended descriptor
 */
static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
						       *req, int desc_bytes,
						       bool *iv_contig_out)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool in_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

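	/*
	 * If the just-mapped IV is immediately followed in bus address space
	 * by a single source segment, the hardware can consume IV + source
	 * as one contiguous input; otherwise build a link table with the IV
	 * as its first entry.
	 */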
	if (mapped_src_nents == 1 &&
	    iv_dma + ivsize == sg_dma_address(req->src)) {
		in_contig = true;
		sec4_sg_ents = 0;
	} else {
		in_contig = false;
		sec4_sg_ents = 1 + mapped_src_nents;
	}
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
			 desc_bytes;

	if (!in_contig) {
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + 1, 0);
	}

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
			   iv_dma, ivsize, 0, 0);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	*iv_contig_out = in_contig;
	return edesc;
}

static int ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_enc,
		ctx->sh_desc_enc_dma, edesc, req, iv_contig);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_edesc *edesc;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	bool iv_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
				       CAAM_CMD_SZ, &iv_contig);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_ablkcipher_job(ctx->sh_desc_dec,
		ctx->sh_desc_dec_dma, edesc, req, iv_contig);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		ablkcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map the ablkcipher extended descriptor
 * for ablkcipher givencrypt
 */
static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
				struct skcipher_givcrypt_request *greq,
				int desc_bytes,
				bool *iv_contig_out)
{
	struct ablkcipher_request *req = &greq->creq;
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
	struct ablkcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	bool out_contig;
	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->nbytes);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->nbytes);
		return ERR_PTR(src_nents);
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = src_nents;
		mapped_dst_nents = src_nents;
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}

		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->nbytes);
			return ERR_PTR(dst_nents);
		}

		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/*
	 * Check if the generated IV can be contiguous with the destination.
	 * If so, include it directly; if not, add it to the link table.
	 */
	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, iv_dma)) {
		dev_err(jrdev, "unable to map IV\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}
1760 
1761 	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
1762 	dst_sg_idx = sec4_sg_ents;
1763 	if (mapped_dst_nents == 1 &&
1764 	    iv_dma + ivsize == sg_dma_address(req->dst)) {
1765 		out_contig = true;
1766 	} else {
1767 		out_contig = false;
1768 		sec4_sg_ents += 1 + mapped_dst_nents;
1769 	}
1770 
1771 	/* allocate space for base edesc and hw desc commands, link tables */
1772 	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1773 	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1774 			GFP_DMA | flags);
1775 	if (!edesc) {
1776 		dev_err(jrdev, "could not allocate extended descriptor\n");
1777 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1778 			   iv_dma, ivsize, 0, 0);
1779 		return ERR_PTR(-ENOMEM);
1780 	}
1781 
1782 	edesc->src_nents = src_nents;
1783 	edesc->dst_nents = dst_nents;
1784 	edesc->sec4_sg_bytes = sec4_sg_bytes;
1785 	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
1786 			 desc_bytes;
1787 
1788 	if (mapped_src_nents > 1)
1789 		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
1790 				   0);
1791 
1792 	if (!out_contig) {
1793 		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx,
1794 				   iv_dma, ivsize, 0);
1795 		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1796 				   edesc->sec4_sg + dst_sg_idx + 1, 0);
1797 	}
1798 
1799 	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1800 					    sec4_sg_bytes, DMA_TO_DEVICE);
1801 	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1802 		dev_err(jrdev, "unable to map S/G table\n");
1803 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1804 			   iv_dma, ivsize, 0, 0);
1805 		kfree(edesc);
1806 		return ERR_PTR(-ENOMEM);
1807 	}
1808 	edesc->iv_dma = iv_dma;
1809 
1810 #ifdef DEBUG
1811 	print_hex_dump(KERN_ERR,
1812 		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
1813 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1814 		       sec4_sg_bytes, 1);
1815 #endif
1816 
1817 	*iv_contig_out = out_contig;
1818 	return edesc;
1819 }
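
/*
 * Worked example of the link-table sizing above (illustrative numbers):
 * with a 3-entry mapped source and a 2-entry mapped destination that is
 * not adjacent to the IV, sec4_sg_ents = 3 + (1 + 2) = 6 and
 * dst_sg_idx = 3, so the table is laid out as
 *
 *   [0..2]  source entries       (sg_to_sec4_sg_last)
 *   [3]     IV entry             (dma_to_sec4_sg_one)
 *   [4..5]  destination entries  (sg_to_sec4_sg_last)
 *
 * When the IV buffer ends exactly where a single-entry destination
 * begins, out_contig is true, no output table entries are needed, and
 * the job descriptor can address iv_dma directly for ivsize + nbytes
 * bytes of output (generated IV followed by ciphertext).
 */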
1820 
1821 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
1822 {
1823 	struct ablkcipher_request *req = &creq->creq;
1824 	struct ablkcipher_edesc *edesc;
1825 	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1826 	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1827 	struct device *jrdev = ctx->jrdev;
1828 	bool iv_contig = false;
1829 	u32 *desc;
1830 	int ret = 0;
1831 
1832 	/* allocate extended descriptor */
1833 	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
1834 				       CAAM_CMD_SZ, &iv_contig);
1835 	if (IS_ERR(edesc))
1836 		return PTR_ERR(edesc);
1837 
1838 	/* Create and submit job descriptor */
1839 	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
1840 				edesc, req, iv_contig);
1841 #ifdef DEBUG
1842 	print_hex_dump(KERN_ERR,
1843 		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
1844 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1845 		       desc_bytes(edesc->hw_desc), 1);
1846 #endif
1847 	desc = edesc->hw_desc;
1848 	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1849 
1850 	if (!ret) {
1851 		ret = -EINPROGRESS;
1852 	} else {
1853 		ablkcipher_unmap(jrdev, edesc, req);
1854 		kfree(edesc);
1855 	}
1856 
1857 	return ret;
1858 }
1859 
1860 #define template_aead		template_u.aead
1861 #define template_ablkcipher	template_u.ablkcipher
1862 struct caam_alg_template {
1863 	char name[CRYPTO_MAX_ALG_NAME];
1864 	char driver_name[CRYPTO_MAX_ALG_NAME];
1865 	unsigned int blocksize;
1866 	u32 type;
1867 	union {
1868 		struct ablkcipher_alg ablkcipher;
1869 	} template_u;
1870 	u32 class1_alg_type;
1871 	u32 class2_alg_type;
1872 };
1873 
1874 static struct caam_alg_template driver_algs[] = {
1875 	/* ablkcipher descriptor */
1876 	{
1877 		.name = "cbc(aes)",
1878 		.driver_name = "cbc-aes-caam",
1879 		.blocksize = AES_BLOCK_SIZE,
1880 		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1881 		.template_ablkcipher = {
1882 			.setkey = ablkcipher_setkey,
1883 			.encrypt = ablkcipher_encrypt,
1884 			.decrypt = ablkcipher_decrypt,
1885 			.givencrypt = ablkcipher_givencrypt,
1886 			.geniv = "<built-in>",
1887 			.min_keysize = AES_MIN_KEY_SIZE,
1888 			.max_keysize = AES_MAX_KEY_SIZE,
1889 			.ivsize = AES_BLOCK_SIZE,
1890 			},
1891 		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1892 	},
1893 	{
1894 		.name = "cbc(des3_ede)",
1895 		.driver_name = "cbc-3des-caam",
1896 		.blocksize = DES3_EDE_BLOCK_SIZE,
1897 		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1898 		.template_ablkcipher = {
1899 			.setkey = ablkcipher_setkey,
1900 			.encrypt = ablkcipher_encrypt,
1901 			.decrypt = ablkcipher_decrypt,
1902 			.givencrypt = ablkcipher_givencrypt,
1903 			.geniv = "<built-in>",
1904 			.min_keysize = DES3_EDE_KEY_SIZE,
1905 			.max_keysize = DES3_EDE_KEY_SIZE,
1906 			.ivsize = DES3_EDE_BLOCK_SIZE,
1907 			},
1908 		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1909 	},
1910 	{
1911 		.name = "cbc(des)",
1912 		.driver_name = "cbc-des-caam",
1913 		.blocksize = DES_BLOCK_SIZE,
1914 		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1915 		.template_ablkcipher = {
1916 			.setkey = ablkcipher_setkey,
1917 			.encrypt = ablkcipher_encrypt,
1918 			.decrypt = ablkcipher_decrypt,
1919 			.givencrypt = ablkcipher_givencrypt,
1920 			.geniv = "<built-in>",
1921 			.min_keysize = DES_KEY_SIZE,
1922 			.max_keysize = DES_KEY_SIZE,
1923 			.ivsize = DES_BLOCK_SIZE,
1924 			},
1925 		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1926 	},
1927 	{
1928 		.name = "ctr(aes)",
1929 		.driver_name = "ctr-aes-caam",
1930 		.blocksize = 1,
1931 		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1932 		.template_ablkcipher = {
1933 			.setkey = ablkcipher_setkey,
1934 			.encrypt = ablkcipher_encrypt,
1935 			.decrypt = ablkcipher_decrypt,
1936 			.geniv = "chainiv",
1937 			.min_keysize = AES_MIN_KEY_SIZE,
1938 			.max_keysize = AES_MAX_KEY_SIZE,
1939 			.ivsize = AES_BLOCK_SIZE,
1940 			},
1941 		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
1942 	},
1943 	{
1944 		.name = "rfc3686(ctr(aes))",
1945 		.driver_name = "rfc3686-ctr-aes-caam",
1946 		.blocksize = 1,
1947 		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1948 		.template_ablkcipher = {
1949 			.setkey = ablkcipher_setkey,
1950 			.encrypt = ablkcipher_encrypt,
1951 			.decrypt = ablkcipher_decrypt,
1952 			.givencrypt = ablkcipher_givencrypt,
1953 			.geniv = "<built-in>",
1954 			.min_keysize = AES_MIN_KEY_SIZE +
1955 				       CTR_RFC3686_NONCE_SIZE,
1956 			.max_keysize = AES_MAX_KEY_SIZE +
1957 				       CTR_RFC3686_NONCE_SIZE,
1958 			.ivsize = CTR_RFC3686_IV_SIZE,
1959 			},
1960 		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
1961 	},
1962 	{
1963 		.name = "xts(aes)",
1964 		.driver_name = "xts-aes-caam",
1965 		.blocksize = AES_BLOCK_SIZE,
1966 		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1967 		.template_ablkcipher = {
1968 			.setkey = xts_ablkcipher_setkey,
1969 			.encrypt = ablkcipher_encrypt,
1970 			.decrypt = ablkcipher_decrypt,
1971 			.geniv = "eseqiv",
1972 			.min_keysize = 2 * AES_MIN_KEY_SIZE,
1973 			.max_keysize = 2 * AES_MAX_KEY_SIZE,
1974 			.ivsize = AES_BLOCK_SIZE,
1975 			},
1976 		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1977 	},
1978 };
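
/*
 * A hedged usage sketch (illustrative, not part of the driver): once the
 * "cbc(aes)" template above is registered at CAAM_CRA_PRIORITY, a kernel
 * user reaches it through the generic ablkcipher API roughly as below.
 * All "example_*" names are hypothetical, error handling is abbreviated,
 * buf must be DMA-able memory (e.g. kmalloc'ed) rather than stack, and
 * the needed declarations are assumed to arrive via compat.h.
 */
struct example_wait {
	struct completion done;
	int err;
};

static void example_op_done(struct crypto_async_request *areq, int err)
{
	struct example_wait *w = areq->data;

	if (err == -EINPROGRESS)
		return;	/* a backlogged request has only just started */

	w->err = err;
	complete(&w->done);
}

static int __maybe_unused example_cbc_aes_encrypt(u8 *buf, unsigned int len,
						  const u8 *key,
						  unsigned int keylen, u8 *iv)
{
	struct crypto_ablkcipher *tfm;
	struct ablkcipher_request *req;
	struct example_wait wait;
	struct scatterlist sg;
	int ret;

	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_ablkcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out_free_tfm;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	/* in-place encryption; len must be a multiple of AES_BLOCK_SIZE */
	init_completion(&wait.done);
	sg_init_one(&sg, buf, len);
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
					CRYPTO_TFM_REQ_MAY_SLEEP,
					example_op_done, &wait);
	ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);

	ret = crypto_ablkcipher_encrypt(req);
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		/* accepted (or backlogged): wait for the completion */
		wait_for_completion(&wait.done);
		ret = wait.err;
	}

	ablkcipher_request_free(req);
out_free_tfm:
	crypto_free_ablkcipher(tfm);
	return ret;
}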
1979 
1980 static struct caam_aead_alg driver_aeads[] = {
1981 	{
1982 		.aead = {
1983 			.base = {
1984 				.cra_name = "rfc4106(gcm(aes))",
1985 				.cra_driver_name = "rfc4106-gcm-aes-caam",
1986 				.cra_blocksize = 1,
1987 			},
1988 			.setkey = rfc4106_setkey,
1989 			.setauthsize = rfc4106_setauthsize,
1990 			.encrypt = ipsec_gcm_encrypt,
1991 			.decrypt = ipsec_gcm_decrypt,
1992 			.ivsize = 8,
1993 			.maxauthsize = AES_BLOCK_SIZE,
1994 		},
1995 		.caam = {
1996 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1997 		},
1998 	},
1999 	{
2000 		.aead = {
2001 			.base = {
2002 				.cra_name = "rfc4543(gcm(aes))",
2003 				.cra_driver_name = "rfc4543-gcm-aes-caam",
2004 				.cra_blocksize = 1,
2005 			},
2006 			.setkey = rfc4543_setkey,
2007 			.setauthsize = rfc4543_setauthsize,
2008 			.encrypt = ipsec_gcm_encrypt,
2009 			.decrypt = ipsec_gcm_decrypt,
2010 			.ivsize = 8,
2011 			.maxauthsize = AES_BLOCK_SIZE,
2012 		},
2013 		.caam = {
2014 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2015 		},
2016 	},
2017 	/* Galois Counter Mode */
2018 	{
2019 		.aead = {
2020 			.base = {
2021 				.cra_name = "gcm(aes)",
2022 				.cra_driver_name = "gcm-aes-caam",
2023 				.cra_blocksize = 1,
2024 			},
2025 			.setkey = gcm_setkey,
2026 			.setauthsize = gcm_setauthsize,
2027 			.encrypt = gcm_encrypt,
2028 			.decrypt = gcm_decrypt,
2029 			.ivsize = 12,
2030 			.maxauthsize = AES_BLOCK_SIZE,
2031 		},
2032 		.caam = {
2033 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2034 		},
2035 	},
2036 	/* single-pass ipsec_esp descriptor */
2037 	{
2038 		.aead = {
2039 			.base = {
2040 				.cra_name = "authenc(hmac(md5),"
2041 					    "ecb(cipher_null))",
2042 				.cra_driver_name = "authenc-hmac-md5-"
2043 						   "ecb-cipher_null-caam",
2044 				.cra_blocksize = NULL_BLOCK_SIZE,
2045 			},
2046 			.setkey = aead_setkey,
2047 			.setauthsize = aead_setauthsize,
2048 			.encrypt = aead_encrypt,
2049 			.decrypt = aead_decrypt,
2050 			.ivsize = NULL_IV_SIZE,
2051 			.maxauthsize = MD5_DIGEST_SIZE,
2052 		},
2053 		.caam = {
2054 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2055 					   OP_ALG_AAI_HMAC_PRECOMP,
2056 		},
2057 	},
2058 	{
2059 		.aead = {
2060 			.base = {
2061 				.cra_name = "authenc(hmac(sha1),"
2062 					    "ecb(cipher_null))",
2063 				.cra_driver_name = "authenc-hmac-sha1-"
2064 						   "ecb-cipher_null-caam",
2065 				.cra_blocksize = NULL_BLOCK_SIZE,
2066 			},
2067 			.setkey = aead_setkey,
2068 			.setauthsize = aead_setauthsize,
2069 			.encrypt = aead_encrypt,
2070 			.decrypt = aead_decrypt,
2071 			.ivsize = NULL_IV_SIZE,
2072 			.maxauthsize = SHA1_DIGEST_SIZE,
2073 		},
2074 		.caam = {
2075 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2076 					   OP_ALG_AAI_HMAC_PRECOMP,
2077 		},
2078 	},
2079 	{
2080 		.aead = {
2081 			.base = {
2082 				.cra_name = "authenc(hmac(sha224),"
2083 					    "ecb(cipher_null))",
2084 				.cra_driver_name = "authenc-hmac-sha224-"
2085 						   "ecb-cipher_null-caam",
2086 				.cra_blocksize = NULL_BLOCK_SIZE,
2087 			},
2088 			.setkey = aead_setkey,
2089 			.setauthsize = aead_setauthsize,
2090 			.encrypt = aead_encrypt,
2091 			.decrypt = aead_decrypt,
2092 			.ivsize = NULL_IV_SIZE,
2093 			.maxauthsize = SHA224_DIGEST_SIZE,
2094 		},
2095 		.caam = {
2096 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2097 					   OP_ALG_AAI_HMAC_PRECOMP,
2098 		},
2099 	},
2100 	{
2101 		.aead = {
2102 			.base = {
2103 				.cra_name = "authenc(hmac(sha256),"
2104 					    "ecb(cipher_null))",
2105 				.cra_driver_name = "authenc-hmac-sha256-"
2106 						   "ecb-cipher_null-caam",
2107 				.cra_blocksize = NULL_BLOCK_SIZE,
2108 			},
2109 			.setkey = aead_setkey,
2110 			.setauthsize = aead_setauthsize,
2111 			.encrypt = aead_encrypt,
2112 			.decrypt = aead_decrypt,
2113 			.ivsize = NULL_IV_SIZE,
2114 			.maxauthsize = SHA256_DIGEST_SIZE,
2115 		},
2116 		.caam = {
2117 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2118 					   OP_ALG_AAI_HMAC_PRECOMP,
2119 		},
2120 	},
2121 	{
2122 		.aead = {
2123 			.base = {
2124 				.cra_name = "authenc(hmac(sha384),"
2125 					    "ecb(cipher_null))",
2126 				.cra_driver_name = "authenc-hmac-sha384-"
2127 						   "ecb-cipher_null-caam",
2128 				.cra_blocksize = NULL_BLOCK_SIZE,
2129 			},
2130 			.setkey = aead_setkey,
2131 			.setauthsize = aead_setauthsize,
2132 			.encrypt = aead_encrypt,
2133 			.decrypt = aead_decrypt,
2134 			.ivsize = NULL_IV_SIZE,
2135 			.maxauthsize = SHA384_DIGEST_SIZE,
2136 		},
2137 		.caam = {
2138 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2139 					   OP_ALG_AAI_HMAC_PRECOMP,
2140 		},
2141 	},
2142 	{
2143 		.aead = {
2144 			.base = {
2145 				.cra_name = "authenc(hmac(sha512),"
2146 					    "ecb(cipher_null))",
2147 				.cra_driver_name = "authenc-hmac-sha512-"
2148 						   "ecb-cipher_null-caam",
2149 				.cra_blocksize = NULL_BLOCK_SIZE,
2150 			},
2151 			.setkey = aead_setkey,
2152 			.setauthsize = aead_setauthsize,
2153 			.encrypt = aead_encrypt,
2154 			.decrypt = aead_decrypt,
2155 			.ivsize = NULL_IV_SIZE,
2156 			.maxauthsize = SHA512_DIGEST_SIZE,
2157 		},
2158 		.caam = {
2159 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2160 					   OP_ALG_AAI_HMAC_PRECOMP,
2161 		},
2162 	},
2163 	{
2164 		.aead = {
2165 			.base = {
2166 				.cra_name = "authenc(hmac(md5),cbc(aes))",
2167 				.cra_driver_name = "authenc-hmac-md5-"
2168 						   "cbc-aes-caam",
2169 				.cra_blocksize = AES_BLOCK_SIZE,
2170 			},
2171 			.setkey = aead_setkey,
2172 			.setauthsize = aead_setauthsize,
2173 			.encrypt = aead_encrypt,
2174 			.decrypt = aead_decrypt,
2175 			.ivsize = AES_BLOCK_SIZE,
2176 			.maxauthsize = MD5_DIGEST_SIZE,
2177 		},
2178 		.caam = {
2179 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2180 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2181 					   OP_ALG_AAI_HMAC_PRECOMP,
2182 		},
2183 	},
2184 	{
2185 		.aead = {
2186 			.base = {
2187 				.cra_name = "echainiv(authenc(hmac(md5),"
2188 					    "cbc(aes)))",
2189 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2190 						   "cbc-aes-caam",
2191 				.cra_blocksize = AES_BLOCK_SIZE,
2192 			},
2193 			.setkey = aead_setkey,
2194 			.setauthsize = aead_setauthsize,
2195 			.encrypt = aead_encrypt,
2196 			.decrypt = aead_decrypt,
2197 			.ivsize = AES_BLOCK_SIZE,
2198 			.maxauthsize = MD5_DIGEST_SIZE,
2199 		},
2200 		.caam = {
2201 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2202 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2203 					   OP_ALG_AAI_HMAC_PRECOMP,
2204 			.geniv = true,
2205 		},
2206 	},
2207 	{
2208 		.aead = {
2209 			.base = {
2210 				.cra_name = "authenc(hmac(sha1),cbc(aes))",
2211 				.cra_driver_name = "authenc-hmac-sha1-"
2212 						   "cbc-aes-caam",
2213 				.cra_blocksize = AES_BLOCK_SIZE,
2214 			},
2215 			.setkey = aead_setkey,
2216 			.setauthsize = aead_setauthsize,
2217 			.encrypt = aead_encrypt,
2218 			.decrypt = aead_decrypt,
2219 			.ivsize = AES_BLOCK_SIZE,
2220 			.maxauthsize = SHA1_DIGEST_SIZE,
2221 		},
2222 		.caam = {
2223 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2224 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2225 					   OP_ALG_AAI_HMAC_PRECOMP,
2226 		},
2227 	},
2228 	{
2229 		.aead = {
2230 			.base = {
2231 				.cra_name = "echainiv(authenc(hmac(sha1),"
2232 					    "cbc(aes)))",
2233 				.cra_driver_name = "echainiv-authenc-"
2234 						   "hmac-sha1-cbc-aes-caam",
2235 				.cra_blocksize = AES_BLOCK_SIZE,
2236 			},
2237 			.setkey = aead_setkey,
2238 			.setauthsize = aead_setauthsize,
2239 			.encrypt = aead_encrypt,
2240 			.decrypt = aead_decrypt,
2241 			.ivsize = AES_BLOCK_SIZE,
2242 			.maxauthsize = SHA1_DIGEST_SIZE,
2243 		},
2244 		.caam = {
2245 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2246 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2247 					   OP_ALG_AAI_HMAC_PRECOMP,
2248 			.geniv = true,
2249 		},
2250 	},
2251 	{
2252 		.aead = {
2253 			.base = {
2254 				.cra_name = "authenc(hmac(sha224),cbc(aes))",
2255 				.cra_driver_name = "authenc-hmac-sha224-"
2256 						   "cbc-aes-caam",
2257 				.cra_blocksize = AES_BLOCK_SIZE,
2258 			},
2259 			.setkey = aead_setkey,
2260 			.setauthsize = aead_setauthsize,
2261 			.encrypt = aead_encrypt,
2262 			.decrypt = aead_decrypt,
2263 			.ivsize = AES_BLOCK_SIZE,
2264 			.maxauthsize = SHA224_DIGEST_SIZE,
2265 		},
2266 		.caam = {
2267 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2268 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2269 					   OP_ALG_AAI_HMAC_PRECOMP,
2270 		},
2271 	},
2272 	{
2273 		.aead = {
2274 			.base = {
2275 				.cra_name = "echainiv(authenc(hmac(sha224),"
2276 					    "cbc(aes)))",
2277 				.cra_driver_name = "echainiv-authenc-"
2278 						   "hmac-sha224-cbc-aes-caam",
2279 				.cra_blocksize = AES_BLOCK_SIZE,
2280 			},
2281 			.setkey = aead_setkey,
2282 			.setauthsize = aead_setauthsize,
2283 			.encrypt = aead_encrypt,
2284 			.decrypt = aead_decrypt,
2285 			.ivsize = AES_BLOCK_SIZE,
2286 			.maxauthsize = SHA224_DIGEST_SIZE,
2287 		},
2288 		.caam = {
2289 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2290 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2291 					   OP_ALG_AAI_HMAC_PRECOMP,
2292 			.geniv = true,
2293 		},
2294 	},
2295 	{
2296 		.aead = {
2297 			.base = {
2298 				.cra_name = "authenc(hmac(sha256),cbc(aes))",
2299 				.cra_driver_name = "authenc-hmac-sha256-"
2300 						   "cbc-aes-caam",
2301 				.cra_blocksize = AES_BLOCK_SIZE,
2302 			},
2303 			.setkey = aead_setkey,
2304 			.setauthsize = aead_setauthsize,
2305 			.encrypt = aead_encrypt,
2306 			.decrypt = aead_decrypt,
2307 			.ivsize = AES_BLOCK_SIZE,
2308 			.maxauthsize = SHA256_DIGEST_SIZE,
2309 		},
2310 		.caam = {
2311 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2312 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2313 					   OP_ALG_AAI_HMAC_PRECOMP,
2314 		},
2315 	},
2316 	{
2317 		.aead = {
2318 			.base = {
2319 				.cra_name = "echainiv(authenc(hmac(sha256),"
2320 					    "cbc(aes)))",
2321 				.cra_driver_name = "echainiv-authenc-"
2322 						   "hmac-sha256-cbc-aes-caam",
2323 				.cra_blocksize = AES_BLOCK_SIZE,
2324 			},
2325 			.setkey = aead_setkey,
2326 			.setauthsize = aead_setauthsize,
2327 			.encrypt = aead_encrypt,
2328 			.decrypt = aead_decrypt,
2329 			.ivsize = AES_BLOCK_SIZE,
2330 			.maxauthsize = SHA256_DIGEST_SIZE,
2331 		},
2332 		.caam = {
2333 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2334 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2335 					   OP_ALG_AAI_HMAC_PRECOMP,
2336 			.geniv = true,
2337 		},
2338 	},
2339 	{
2340 		.aead = {
2341 			.base = {
2342 				.cra_name = "authenc(hmac(sha384),cbc(aes))",
2343 				.cra_driver_name = "authenc-hmac-sha384-"
2344 						   "cbc-aes-caam",
2345 				.cra_blocksize = AES_BLOCK_SIZE,
2346 			},
2347 			.setkey = aead_setkey,
2348 			.setauthsize = aead_setauthsize,
2349 			.encrypt = aead_encrypt,
2350 			.decrypt = aead_decrypt,
2351 			.ivsize = AES_BLOCK_SIZE,
2352 			.maxauthsize = SHA384_DIGEST_SIZE,
2353 		},
2354 		.caam = {
2355 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2356 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2357 					   OP_ALG_AAI_HMAC_PRECOMP,
2358 		},
2359 	},
2360 	{
2361 		.aead = {
2362 			.base = {
2363 				.cra_name = "echainiv(authenc(hmac(sha384),"
2364 					    "cbc(aes)))",
2365 				.cra_driver_name = "echainiv-authenc-"
2366 						   "hmac-sha384-cbc-aes-caam",
2367 				.cra_blocksize = AES_BLOCK_SIZE,
2368 			},
2369 			.setkey = aead_setkey,
2370 			.setauthsize = aead_setauthsize,
2371 			.encrypt = aead_encrypt,
2372 			.decrypt = aead_decrypt,
2373 			.ivsize = AES_BLOCK_SIZE,
2374 			.maxauthsize = SHA384_DIGEST_SIZE,
2375 		},
2376 		.caam = {
2377 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2378 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2379 					   OP_ALG_AAI_HMAC_PRECOMP,
2380 			.geniv = true,
2381 		},
2382 	},
2383 	{
2384 		.aead = {
2385 			.base = {
2386 				.cra_name = "authenc(hmac(sha512),cbc(aes))",
2387 				.cra_driver_name = "authenc-hmac-sha512-"
2388 						   "cbc-aes-caam",
2389 				.cra_blocksize = AES_BLOCK_SIZE,
2390 			},
2391 			.setkey = aead_setkey,
2392 			.setauthsize = aead_setauthsize,
2393 			.encrypt = aead_encrypt,
2394 			.decrypt = aead_decrypt,
2395 			.ivsize = AES_BLOCK_SIZE,
2396 			.maxauthsize = SHA512_DIGEST_SIZE,
2397 		},
2398 		.caam = {
2399 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2400 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2401 					   OP_ALG_AAI_HMAC_PRECOMP,
2402 		},
2403 	},
2404 	{
2405 		.aead = {
2406 			.base = {
2407 				.cra_name = "echainiv(authenc(hmac(sha512),"
2408 					    "cbc(aes)))",
2409 				.cra_driver_name = "echainiv-authenc-"
2410 						   "hmac-sha512-cbc-aes-caam",
2411 				.cra_blocksize = AES_BLOCK_SIZE,
2412 			},
2413 			.setkey = aead_setkey,
2414 			.setauthsize = aead_setauthsize,
2415 			.encrypt = aead_encrypt,
2416 			.decrypt = aead_decrypt,
2417 			.ivsize = AES_BLOCK_SIZE,
2418 			.maxauthsize = SHA512_DIGEST_SIZE,
2419 		},
2420 		.caam = {
2421 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2422 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2423 					   OP_ALG_AAI_HMAC_PRECOMP,
2424 			.geniv = true,
2425 		},
2426 	},
2427 	{
2428 		.aead = {
2429 			.base = {
2430 				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2431 				.cra_driver_name = "authenc-hmac-md5-"
2432 						   "cbc-des3_ede-caam",
2433 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2434 			},
2435 			.setkey = aead_setkey,
2436 			.setauthsize = aead_setauthsize,
2437 			.encrypt = aead_encrypt,
2438 			.decrypt = aead_decrypt,
2439 			.ivsize = DES3_EDE_BLOCK_SIZE,
2440 			.maxauthsize = MD5_DIGEST_SIZE,
2441 		},
2442 		.caam = {
2443 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2444 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2445 					   OP_ALG_AAI_HMAC_PRECOMP,
2446 		}
2447 	},
2448 	{
2449 		.aead = {
2450 			.base = {
2451 				.cra_name = "echainiv(authenc(hmac(md5),"
2452 					    "cbc(des3_ede)))",
2453 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2454 						   "cbc-des3_ede-caam",
2455 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2456 			},
2457 			.setkey = aead_setkey,
2458 			.setauthsize = aead_setauthsize,
2459 			.encrypt = aead_encrypt,
2460 			.decrypt = aead_decrypt,
2461 			.ivsize = DES3_EDE_BLOCK_SIZE,
2462 			.maxauthsize = MD5_DIGEST_SIZE,
2463 		},
2464 		.caam = {
2465 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2466 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2467 					   OP_ALG_AAI_HMAC_PRECOMP,
2468 			.geniv = true,
2469 		}
2470 	},
2471 	{
2472 		.aead = {
2473 			.base = {
2474 				.cra_name = "authenc(hmac(sha1),"
2475 					    "cbc(des3_ede))",
2476 				.cra_driver_name = "authenc-hmac-sha1-"
2477 						   "cbc-des3_ede-caam",
2478 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2479 			},
2480 			.setkey = aead_setkey,
2481 			.setauthsize = aead_setauthsize,
2482 			.encrypt = aead_encrypt,
2483 			.decrypt = aead_decrypt,
2484 			.ivsize = DES3_EDE_BLOCK_SIZE,
2485 			.maxauthsize = SHA1_DIGEST_SIZE,
2486 		},
2487 		.caam = {
2488 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2489 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2490 					   OP_ALG_AAI_HMAC_PRECOMP,
2491 		},
2492 	},
2493 	{
2494 		.aead = {
2495 			.base = {
2496 				.cra_name = "echainiv(authenc(hmac(sha1),"
2497 					    "cbc(des3_ede)))",
2498 				.cra_driver_name = "echainiv-authenc-"
2499 						   "hmac-sha1-"
2500 						   "cbc-des3_ede-caam",
2501 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2502 			},
2503 			.setkey = aead_setkey,
2504 			.setauthsize = aead_setauthsize,
2505 			.encrypt = aead_encrypt,
2506 			.decrypt = aead_decrypt,
2507 			.ivsize = DES3_EDE_BLOCK_SIZE,
2508 			.maxauthsize = SHA1_DIGEST_SIZE,
2509 		},
2510 		.caam = {
2511 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2512 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2513 					   OP_ALG_AAI_HMAC_PRECOMP,
2514 			.geniv = true,
2515 		},
2516 	},
2517 	{
2518 		.aead = {
2519 			.base = {
2520 				.cra_name = "authenc(hmac(sha224),"
2521 					    "cbc(des3_ede))",
2522 				.cra_driver_name = "authenc-hmac-sha224-"
2523 						   "cbc-des3_ede-caam",
2524 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2525 			},
2526 			.setkey = aead_setkey,
2527 			.setauthsize = aead_setauthsize,
2528 			.encrypt = aead_encrypt,
2529 			.decrypt = aead_decrypt,
2530 			.ivsize = DES3_EDE_BLOCK_SIZE,
2531 			.maxauthsize = SHA224_DIGEST_SIZE,
2532 		},
2533 		.caam = {
2534 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2535 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2536 					   OP_ALG_AAI_HMAC_PRECOMP,
2537 		},
2538 	},
2539 	{
2540 		.aead = {
2541 			.base = {
2542 				.cra_name = "echainiv(authenc(hmac(sha224),"
2543 					    "cbc(des3_ede)))",
2544 				.cra_driver_name = "echainiv-authenc-"
2545 						   "hmac-sha224-"
2546 						   "cbc-des3_ede-caam",
2547 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2548 			},
2549 			.setkey = aead_setkey,
2550 			.setauthsize = aead_setauthsize,
2551 			.encrypt = aead_encrypt,
2552 			.decrypt = aead_decrypt,
2553 			.ivsize = DES3_EDE_BLOCK_SIZE,
2554 			.maxauthsize = SHA224_DIGEST_SIZE,
2555 		},
2556 		.caam = {
2557 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2558 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2559 					   OP_ALG_AAI_HMAC_PRECOMP,
2560 			.geniv = true,
2561 		},
2562 	},
2563 	{
2564 		.aead = {
2565 			.base = {
2566 				.cra_name = "authenc(hmac(sha256),"
2567 					    "cbc(des3_ede))",
2568 				.cra_driver_name = "authenc-hmac-sha256-"
2569 						   "cbc-des3_ede-caam",
2570 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2571 			},
2572 			.setkey = aead_setkey,
2573 			.setauthsize = aead_setauthsize,
2574 			.encrypt = aead_encrypt,
2575 			.decrypt = aead_decrypt,
2576 			.ivsize = DES3_EDE_BLOCK_SIZE,
2577 			.maxauthsize = SHA256_DIGEST_SIZE,
2578 		},
2579 		.caam = {
2580 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2581 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2582 					   OP_ALG_AAI_HMAC_PRECOMP,
2583 		},
2584 	},
2585 	{
2586 		.aead = {
2587 			.base = {
2588 				.cra_name = "echainiv(authenc(hmac(sha256),"
2589 					    "cbc(des3_ede)))",
2590 				.cra_driver_name = "echainiv-authenc-"
2591 						   "hmac-sha256-"
2592 						   "cbc-des3_ede-caam",
2593 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2594 			},
2595 			.setkey = aead_setkey,
2596 			.setauthsize = aead_setauthsize,
2597 			.encrypt = aead_encrypt,
2598 			.decrypt = aead_decrypt,
2599 			.ivsize = DES3_EDE_BLOCK_SIZE,
2600 			.maxauthsize = SHA256_DIGEST_SIZE,
2601 		},
2602 		.caam = {
2603 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2604 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2605 					   OP_ALG_AAI_HMAC_PRECOMP,
2606 			.geniv = true,
2607 		},
2608 	},
2609 	{
2610 		.aead = {
2611 			.base = {
2612 				.cra_name = "authenc(hmac(sha384),"
2613 					    "cbc(des3_ede))",
2614 				.cra_driver_name = "authenc-hmac-sha384-"
2615 						   "cbc-des3_ede-caam",
2616 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2617 			},
2618 			.setkey = aead_setkey,
2619 			.setauthsize = aead_setauthsize,
2620 			.encrypt = aead_encrypt,
2621 			.decrypt = aead_decrypt,
2622 			.ivsize = DES3_EDE_BLOCK_SIZE,
2623 			.maxauthsize = SHA384_DIGEST_SIZE,
2624 		},
2625 		.caam = {
2626 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2627 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2628 					   OP_ALG_AAI_HMAC_PRECOMP,
2629 		},
2630 	},
2631 	{
2632 		.aead = {
2633 			.base = {
2634 				.cra_name = "echainiv(authenc(hmac(sha384),"
2635 					    "cbc(des3_ede)))",
2636 				.cra_driver_name = "echainiv-authenc-"
2637 						   "hmac-sha384-"
2638 						   "cbc-des3_ede-caam",
2639 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2640 			},
2641 			.setkey = aead_setkey,
2642 			.setauthsize = aead_setauthsize,
2643 			.encrypt = aead_encrypt,
2644 			.decrypt = aead_decrypt,
2645 			.ivsize = DES3_EDE_BLOCK_SIZE,
2646 			.maxauthsize = SHA384_DIGEST_SIZE,
2647 		},
2648 		.caam = {
2649 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2650 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2651 					   OP_ALG_AAI_HMAC_PRECOMP,
2652 			.geniv = true,
2653 		},
2654 	},
2655 	{
2656 		.aead = {
2657 			.base = {
2658 				.cra_name = "authenc(hmac(sha512),"
2659 					    "cbc(des3_ede))",
2660 				.cra_driver_name = "authenc-hmac-sha512-"
2661 						   "cbc-des3_ede-caam",
2662 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2663 			},
2664 			.setkey = aead_setkey,
2665 			.setauthsize = aead_setauthsize,
2666 			.encrypt = aead_encrypt,
2667 			.decrypt = aead_decrypt,
2668 			.ivsize = DES3_EDE_BLOCK_SIZE,
2669 			.maxauthsize = SHA512_DIGEST_SIZE,
2670 		},
2671 		.caam = {
2672 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2673 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2674 					   OP_ALG_AAI_HMAC_PRECOMP,
2675 		},
2676 	},
2677 	{
2678 		.aead = {
2679 			.base = {
2680 				.cra_name = "echainiv(authenc(hmac(sha512),"
2681 					    "cbc(des3_ede)))",
2682 				.cra_driver_name = "echainiv-authenc-"
2683 						   "hmac-sha512-"
2684 						   "cbc-des3_ede-caam",
2685 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2686 			},
2687 			.setkey = aead_setkey,
2688 			.setauthsize = aead_setauthsize,
2689 			.encrypt = aead_encrypt,
2690 			.decrypt = aead_decrypt,
2691 			.ivsize = DES3_EDE_BLOCK_SIZE,
2692 			.maxauthsize = SHA512_DIGEST_SIZE,
2693 		},
2694 		.caam = {
2695 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2696 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2697 					   OP_ALG_AAI_HMAC_PRECOMP,
2698 			.geniv = true,
2699 		},
2700 	},
2701 	{
2702 		.aead = {
2703 			.base = {
2704 				.cra_name = "authenc(hmac(md5),cbc(des))",
2705 				.cra_driver_name = "authenc-hmac-md5-"
2706 						   "cbc-des-caam",
2707 				.cra_blocksize = DES_BLOCK_SIZE,
2708 			},
2709 			.setkey = aead_setkey,
2710 			.setauthsize = aead_setauthsize,
2711 			.encrypt = aead_encrypt,
2712 			.decrypt = aead_decrypt,
2713 			.ivsize = DES_BLOCK_SIZE,
2714 			.maxauthsize = MD5_DIGEST_SIZE,
2715 		},
2716 		.caam = {
2717 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2718 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2719 					   OP_ALG_AAI_HMAC_PRECOMP,
2720 		},
2721 	},
2722 	{
2723 		.aead = {
2724 			.base = {
2725 				.cra_name = "echainiv(authenc(hmac(md5),"
2726 					    "cbc(des)))",
2727 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2728 						   "cbc-des-caam",
2729 				.cra_blocksize = DES_BLOCK_SIZE,
2730 			},
2731 			.setkey = aead_setkey,
2732 			.setauthsize = aead_setauthsize,
2733 			.encrypt = aead_encrypt,
2734 			.decrypt = aead_decrypt,
2735 			.ivsize = DES_BLOCK_SIZE,
2736 			.maxauthsize = MD5_DIGEST_SIZE,
2737 		},
2738 		.caam = {
2739 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2740 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2741 					   OP_ALG_AAI_HMAC_PRECOMP,
2742 			.geniv = true,
2743 		},
2744 	},
2745 	{
2746 		.aead = {
2747 			.base = {
2748 				.cra_name = "authenc(hmac(sha1),cbc(des))",
2749 				.cra_driver_name = "authenc-hmac-sha1-"
2750 						   "cbc-des-caam",
2751 				.cra_blocksize = DES_BLOCK_SIZE,
2752 			},
2753 			.setkey = aead_setkey,
2754 			.setauthsize = aead_setauthsize,
2755 			.encrypt = aead_encrypt,
2756 			.decrypt = aead_decrypt,
2757 			.ivsize = DES_BLOCK_SIZE,
2758 			.maxauthsize = SHA1_DIGEST_SIZE,
2759 		},
2760 		.caam = {
2761 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2762 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2763 					   OP_ALG_AAI_HMAC_PRECOMP,
2764 		},
2765 	},
2766 	{
2767 		.aead = {
2768 			.base = {
2769 				.cra_name = "echainiv(authenc(hmac(sha1),"
2770 					    "cbc(des)))",
2771 				.cra_driver_name = "echainiv-authenc-"
2772 						   "hmac-sha1-cbc-des-caam",
2773 				.cra_blocksize = DES_BLOCK_SIZE,
2774 			},
2775 			.setkey = aead_setkey,
2776 			.setauthsize = aead_setauthsize,
2777 			.encrypt = aead_encrypt,
2778 			.decrypt = aead_decrypt,
2779 			.ivsize = DES_BLOCK_SIZE,
2780 			.maxauthsize = SHA1_DIGEST_SIZE,
2781 		},
2782 		.caam = {
2783 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2784 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2785 					   OP_ALG_AAI_HMAC_PRECOMP,
2786 			.geniv = true,
2787 		},
2788 	},
2789 	{
2790 		.aead = {
2791 			.base = {
2792 				.cra_name = "authenc(hmac(sha224),cbc(des))",
2793 				.cra_driver_name = "authenc-hmac-sha224-"
2794 						   "cbc-des-caam",
2795 				.cra_blocksize = DES_BLOCK_SIZE,
2796 			},
2797 			.setkey = aead_setkey,
2798 			.setauthsize = aead_setauthsize,
2799 			.encrypt = aead_encrypt,
2800 			.decrypt = aead_decrypt,
2801 			.ivsize = DES_BLOCK_SIZE,
2802 			.maxauthsize = SHA224_DIGEST_SIZE,
2803 		},
2804 		.caam = {
2805 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2806 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2807 					   OP_ALG_AAI_HMAC_PRECOMP,
2808 		},
2809 	},
2810 	{
2811 		.aead = {
2812 			.base = {
2813 				.cra_name = "echainiv(authenc(hmac(sha224),"
2814 					    "cbc(des)))",
2815 				.cra_driver_name = "echainiv-authenc-"
2816 						   "hmac-sha224-cbc-des-caam",
2817 				.cra_blocksize = DES_BLOCK_SIZE,
2818 			},
2819 			.setkey = aead_setkey,
2820 			.setauthsize = aead_setauthsize,
2821 			.encrypt = aead_encrypt,
2822 			.decrypt = aead_decrypt,
2823 			.ivsize = DES_BLOCK_SIZE,
2824 			.maxauthsize = SHA224_DIGEST_SIZE,
2825 		},
2826 		.caam = {
2827 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2828 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2829 					   OP_ALG_AAI_HMAC_PRECOMP,
2830 			.geniv = true,
2831 		},
2832 	},
2833 	{
2834 		.aead = {
2835 			.base = {
2836 				.cra_name = "authenc(hmac(sha256),cbc(des))",
2837 				.cra_driver_name = "authenc-hmac-sha256-"
2838 						   "cbc-des-caam",
2839 				.cra_blocksize = DES_BLOCK_SIZE,
2840 			},
2841 			.setkey = aead_setkey,
2842 			.setauthsize = aead_setauthsize,
2843 			.encrypt = aead_encrypt,
2844 			.decrypt = aead_decrypt,
2845 			.ivsize = DES_BLOCK_SIZE,
2846 			.maxauthsize = SHA256_DIGEST_SIZE,
2847 		},
2848 		.caam = {
2849 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2850 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2851 					   OP_ALG_AAI_HMAC_PRECOMP,
2852 		},
2853 	},
2854 	{
2855 		.aead = {
2856 			.base = {
2857 				.cra_name = "echainiv(authenc(hmac(sha256),"
2858 					    "cbc(des)))",
2859 				.cra_driver_name = "echainiv-authenc-"
2860 						   "hmac-sha256-cbc-des-caam",
2861 				.cra_blocksize = DES_BLOCK_SIZE,
2862 			},
2863 			.setkey = aead_setkey,
2864 			.setauthsize = aead_setauthsize,
2865 			.encrypt = aead_encrypt,
2866 			.decrypt = aead_decrypt,
2867 			.ivsize = DES_BLOCK_SIZE,
2868 			.maxauthsize = SHA256_DIGEST_SIZE,
2869 		},
2870 		.caam = {
2871 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2872 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2873 					   OP_ALG_AAI_HMAC_PRECOMP,
2874 			.geniv = true,
2875 		},
2876 	},
2877 	{
2878 		.aead = {
2879 			.base = {
2880 				.cra_name = "authenc(hmac(sha384),cbc(des))",
2881 				.cra_driver_name = "authenc-hmac-sha384-"
2882 						   "cbc-des-caam",
2883 				.cra_blocksize = DES_BLOCK_SIZE,
2884 			},
2885 			.setkey = aead_setkey,
2886 			.setauthsize = aead_setauthsize,
2887 			.encrypt = aead_encrypt,
2888 			.decrypt = aead_decrypt,
2889 			.ivsize = DES_BLOCK_SIZE,
2890 			.maxauthsize = SHA384_DIGEST_SIZE,
2891 		},
2892 		.caam = {
2893 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2894 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2895 					   OP_ALG_AAI_HMAC_PRECOMP,
2896 		},
2897 	},
2898 	{
2899 		.aead = {
2900 			.base = {
2901 				.cra_name = "echainiv(authenc(hmac(sha384),"
2902 					    "cbc(des)))",
2903 				.cra_driver_name = "echainiv-authenc-"
2904 						   "hmac-sha384-cbc-des-caam",
2905 				.cra_blocksize = DES_BLOCK_SIZE,
2906 			},
2907 			.setkey = aead_setkey,
2908 			.setauthsize = aead_setauthsize,
2909 			.encrypt = aead_encrypt,
2910 			.decrypt = aead_decrypt,
2911 			.ivsize = DES_BLOCK_SIZE,
2912 			.maxauthsize = SHA384_DIGEST_SIZE,
2913 		},
2914 		.caam = {
2915 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2916 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2917 					   OP_ALG_AAI_HMAC_PRECOMP,
2918 			.geniv = true,
2919 		},
2920 	},
2921 	{
2922 		.aead = {
2923 			.base = {
2924 				.cra_name = "authenc(hmac(sha512),cbc(des))",
2925 				.cra_driver_name = "authenc-hmac-sha512-"
2926 						   "cbc-des-caam",
2927 				.cra_blocksize = DES_BLOCK_SIZE,
2928 			},
2929 			.setkey = aead_setkey,
2930 			.setauthsize = aead_setauthsize,
2931 			.encrypt = aead_encrypt,
2932 			.decrypt = aead_decrypt,
2933 			.ivsize = DES_BLOCK_SIZE,
2934 			.maxauthsize = SHA512_DIGEST_SIZE,
2935 		},
2936 		.caam = {
2937 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2938 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2939 					   OP_ALG_AAI_HMAC_PRECOMP,
2940 		},
2941 	},
2942 	{
2943 		.aead = {
2944 			.base = {
2945 				.cra_name = "echainiv(authenc(hmac(sha512),"
2946 					    "cbc(des)))",
2947 				.cra_driver_name = "echainiv-authenc-"
2948 						   "hmac-sha512-cbc-des-caam",
2949 				.cra_blocksize = DES_BLOCK_SIZE,
2950 			},
2951 			.setkey = aead_setkey,
2952 			.setauthsize = aead_setauthsize,
2953 			.encrypt = aead_encrypt,
2954 			.decrypt = aead_decrypt,
2955 			.ivsize = DES_BLOCK_SIZE,
2956 			.maxauthsize = SHA512_DIGEST_SIZE,
2957 		},
2958 		.caam = {
2959 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2960 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2961 					   OP_ALG_AAI_HMAC_PRECOMP,
2962 			.geniv = true,
2963 		},
2964 	},
2965 	{
2966 		.aead = {
2967 			.base = {
2968 				.cra_name = "authenc(hmac(md5),"
2969 					    "rfc3686(ctr(aes)))",
2970 				.cra_driver_name = "authenc-hmac-md5-"
2971 						   "rfc3686-ctr-aes-caam",
2972 				.cra_blocksize = 1,
2973 			},
2974 			.setkey = aead_setkey,
2975 			.setauthsize = aead_setauthsize,
2976 			.encrypt = aead_encrypt,
2977 			.decrypt = aead_decrypt,
2978 			.ivsize = CTR_RFC3686_IV_SIZE,
2979 			.maxauthsize = MD5_DIGEST_SIZE,
2980 		},
2981 		.caam = {
2982 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2983 					   OP_ALG_AAI_CTR_MOD128,
2984 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2985 					   OP_ALG_AAI_HMAC_PRECOMP,
2986 			.rfc3686 = true,
2987 		},
2988 	},
2989 	{
2990 		.aead = {
2991 			.base = {
2992 				.cra_name = "seqiv(authenc("
2993 					    "hmac(md5),rfc3686(ctr(aes))))",
2994 				.cra_driver_name = "seqiv-authenc-hmac-md5-"
2995 						   "rfc3686-ctr-aes-caam",
2996 				.cra_blocksize = 1,
2997 			},
2998 			.setkey = aead_setkey,
2999 			.setauthsize = aead_setauthsize,
3000 			.encrypt = aead_encrypt,
3001 			.decrypt = aead_decrypt,
3002 			.ivsize = CTR_RFC3686_IV_SIZE,
3003 			.maxauthsize = MD5_DIGEST_SIZE,
3004 		},
3005 		.caam = {
3006 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3007 					   OP_ALG_AAI_CTR_MOD128,
3008 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
3009 					   OP_ALG_AAI_HMAC_PRECOMP,
3010 			.rfc3686 = true,
3011 			.geniv = true,
3012 		},
3013 	},
3014 	{
3015 		.aead = {
3016 			.base = {
3017 				.cra_name = "authenc(hmac(sha1),"
3018 					    "rfc3686(ctr(aes)))",
3019 				.cra_driver_name = "authenc-hmac-sha1-"
3020 						   "rfc3686-ctr-aes-caam",
3021 				.cra_blocksize = 1,
3022 			},
3023 			.setkey = aead_setkey,
3024 			.setauthsize = aead_setauthsize,
3025 			.encrypt = aead_encrypt,
3026 			.decrypt = aead_decrypt,
3027 			.ivsize = CTR_RFC3686_IV_SIZE,
3028 			.maxauthsize = SHA1_DIGEST_SIZE,
3029 		},
3030 		.caam = {
3031 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3032 					   OP_ALG_AAI_CTR_MOD128,
3033 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3034 					   OP_ALG_AAI_HMAC_PRECOMP,
3035 			.rfc3686 = true,
3036 		},
3037 	},
3038 	{
3039 		.aead = {
3040 			.base = {
3041 				.cra_name = "seqiv(authenc("
3042 					    "hmac(sha1),rfc3686(ctr(aes))))",
3043 				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
3044 						   "rfc3686-ctr-aes-caam",
3045 				.cra_blocksize = 1,
3046 			},
3047 			.setkey = aead_setkey,
3048 			.setauthsize = aead_setauthsize,
3049 			.encrypt = aead_encrypt,
3050 			.decrypt = aead_decrypt,
3051 			.ivsize = CTR_RFC3686_IV_SIZE,
3052 			.maxauthsize = SHA1_DIGEST_SIZE,
3053 		},
3054 		.caam = {
3055 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3056 					   OP_ALG_AAI_CTR_MOD128,
3057 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3058 					   OP_ALG_AAI_HMAC_PRECOMP,
3059 			.rfc3686 = true,
3060 			.geniv = true,
3061 		},
3062 	},
3063 	{
3064 		.aead = {
3065 			.base = {
3066 				.cra_name = "authenc(hmac(sha224),"
3067 					    "rfc3686(ctr(aes)))",
3068 				.cra_driver_name = "authenc-hmac-sha224-"
3069 						   "rfc3686-ctr-aes-caam",
3070 				.cra_blocksize = 1,
3071 			},
3072 			.setkey = aead_setkey,
3073 			.setauthsize = aead_setauthsize,
3074 			.encrypt = aead_encrypt,
3075 			.decrypt = aead_decrypt,
3076 			.ivsize = CTR_RFC3686_IV_SIZE,
3077 			.maxauthsize = SHA224_DIGEST_SIZE,
3078 		},
3079 		.caam = {
3080 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3081 					   OP_ALG_AAI_CTR_MOD128,
3082 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3083 					   OP_ALG_AAI_HMAC_PRECOMP,
3084 			.rfc3686 = true,
3085 		},
3086 	},
3087 	{
3088 		.aead = {
3089 			.base = {
3090 				.cra_name = "seqiv(authenc("
3091 					    "hmac(sha224),rfc3686(ctr(aes))))",
3092 				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
3093 						   "rfc3686-ctr-aes-caam",
3094 				.cra_blocksize = 1,
3095 			},
3096 			.setkey = aead_setkey,
3097 			.setauthsize = aead_setauthsize,
3098 			.encrypt = aead_encrypt,
3099 			.decrypt = aead_decrypt,
3100 			.ivsize = CTR_RFC3686_IV_SIZE,
3101 			.maxauthsize = SHA224_DIGEST_SIZE,
3102 		},
3103 		.caam = {
3104 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3105 					   OP_ALG_AAI_CTR_MOD128,
3106 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3107 					   OP_ALG_AAI_HMAC_PRECOMP,
3108 			.rfc3686 = true,
3109 			.geniv = true,
3110 		},
3111 	},
3112 	{
3113 		.aead = {
3114 			.base = {
3115 				.cra_name = "authenc(hmac(sha256),"
3116 					    "rfc3686(ctr(aes)))",
3117 				.cra_driver_name = "authenc-hmac-sha256-"
3118 						   "rfc3686-ctr-aes-caam",
3119 				.cra_blocksize = 1,
3120 			},
3121 			.setkey = aead_setkey,
3122 			.setauthsize = aead_setauthsize,
3123 			.encrypt = aead_encrypt,
3124 			.decrypt = aead_decrypt,
3125 			.ivsize = CTR_RFC3686_IV_SIZE,
3126 			.maxauthsize = SHA256_DIGEST_SIZE,
3127 		},
3128 		.caam = {
3129 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3130 					   OP_ALG_AAI_CTR_MOD128,
3131 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3132 					   OP_ALG_AAI_HMAC_PRECOMP,
3133 			.rfc3686 = true,
3134 		},
3135 	},
3136 	{
3137 		.aead = {
3138 			.base = {
3139 				.cra_name = "seqiv(authenc(hmac(sha256),"
3140 					    "rfc3686(ctr(aes))))",
3141 				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
3142 						   "rfc3686-ctr-aes-caam",
3143 				.cra_blocksize = 1,
3144 			},
3145 			.setkey = aead_setkey,
3146 			.setauthsize = aead_setauthsize,
3147 			.encrypt = aead_encrypt,
3148 			.decrypt = aead_decrypt,
3149 			.ivsize = CTR_RFC3686_IV_SIZE,
3150 			.maxauthsize = SHA256_DIGEST_SIZE,
3151 		},
3152 		.caam = {
3153 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3154 					   OP_ALG_AAI_CTR_MOD128,
3155 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3156 					   OP_ALG_AAI_HMAC_PRECOMP,
3157 			.rfc3686 = true,
3158 			.geniv = true,
3159 		},
3160 	},
3161 	{
3162 		.aead = {
3163 			.base = {
3164 				.cra_name = "authenc(hmac(sha384),"
3165 					    "rfc3686(ctr(aes)))",
3166 				.cra_driver_name = "authenc-hmac-sha384-"
3167 						   "rfc3686-ctr-aes-caam",
3168 				.cra_blocksize = 1,
3169 			},
3170 			.setkey = aead_setkey,
3171 			.setauthsize = aead_setauthsize,
3172 			.encrypt = aead_encrypt,
3173 			.decrypt = aead_decrypt,
3174 			.ivsize = CTR_RFC3686_IV_SIZE,
3175 			.maxauthsize = SHA384_DIGEST_SIZE,
3176 		},
3177 		.caam = {
3178 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3179 					   OP_ALG_AAI_CTR_MOD128,
3180 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3181 					   OP_ALG_AAI_HMAC_PRECOMP,
3182 			.rfc3686 = true,
3183 		},
3184 	},
3185 	{
3186 		.aead = {
3187 			.base = {
3188 				.cra_name = "seqiv(authenc(hmac(sha384),"
3189 					    "rfc3686(ctr(aes))))",
3190 				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
3191 						   "rfc3686-ctr-aes-caam",
3192 				.cra_blocksize = 1,
3193 			},
3194 			.setkey = aead_setkey,
3195 			.setauthsize = aead_setauthsize,
3196 			.encrypt = aead_encrypt,
3197 			.decrypt = aead_decrypt,
3198 			.ivsize = CTR_RFC3686_IV_SIZE,
3199 			.maxauthsize = SHA384_DIGEST_SIZE,
3200 		},
3201 		.caam = {
3202 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3203 					   OP_ALG_AAI_CTR_MOD128,
3204 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3205 					   OP_ALG_AAI_HMAC_PRECOMP,
3206 			.rfc3686 = true,
3207 			.geniv = true,
3208 		},
3209 	},
3210 	{
3211 		.aead = {
3212 			.base = {
3213 				.cra_name = "authenc(hmac(sha512),"
3214 					    "rfc3686(ctr(aes)))",
3215 				.cra_driver_name = "authenc-hmac-sha512-"
3216 						   "rfc3686-ctr-aes-caam",
3217 				.cra_blocksize = 1,
3218 			},
3219 			.setkey = aead_setkey,
3220 			.setauthsize = aead_setauthsize,
3221 			.encrypt = aead_encrypt,
3222 			.decrypt = aead_decrypt,
3223 			.ivsize = CTR_RFC3686_IV_SIZE,
3224 			.maxauthsize = SHA512_DIGEST_SIZE,
3225 		},
3226 		.caam = {
3227 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3228 					   OP_ALG_AAI_CTR_MOD128,
3229 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3230 					   OP_ALG_AAI_HMAC_PRECOMP,
3231 			.rfc3686 = true,
3232 		},
3233 	},
3234 	{
3235 		.aead = {
3236 			.base = {
3237 				.cra_name = "seqiv(authenc(hmac(sha512),"
3238 					    "rfc3686(ctr(aes))))",
3239 				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
3240 						   "rfc3686-ctr-aes-caam",
3241 				.cra_blocksize = 1,
3242 			},
3243 			.setkey = aead_setkey,
3244 			.setauthsize = aead_setauthsize,
3245 			.encrypt = aead_encrypt,
3246 			.decrypt = aead_decrypt,
3247 			.ivsize = CTR_RFC3686_IV_SIZE,
3248 			.maxauthsize = SHA512_DIGEST_SIZE,
3249 		},
3250 		.caam = {
3251 			.class1_alg_type = OP_ALG_ALGSEL_AES |
3252 					   OP_ALG_AAI_CTR_MOD128,
3253 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3254 					   OP_ALG_AAI_HMAC_PRECOMP,
3255 			.rfc3686 = true,
3256 			.geniv = true,
3257 		},
3258 	},
3259 };
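
/*
 * A hedged usage sketch (illustrative, not part of the driver) for the
 * "gcm(aes)" entry above, reusing the hypothetical example_wait helpers
 * from the sketch following driver_algs.  The buffer holds assoclen
 * bytes of associated data followed by cryptlen bytes of plaintext, with
 * room for the 16-byte tag appended on encryption, and must be DMA-able
 * (e.g. kmalloc'ed).
 */
static int __maybe_unused example_gcm_aes_seal(u8 *buf, unsigned int assoclen,
					       unsigned int cryptlen,
					       const u8 *key,
					       unsigned int keylen,
					       u8 *iv /* 12 bytes */)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct example_wait wait;
	struct scatterlist sg;
	int ret;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_aead_setkey(tfm, key, keylen);
	if (!ret)
		ret = crypto_aead_setauthsize(tfm, 16);
	if (ret)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	init_completion(&wait.done);
	sg_init_one(&sg, buf, assoclen + cryptlen + 16);
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				  CRYPTO_TFM_REQ_MAY_SLEEP,
				  example_op_done, &wait);
	aead_request_set_ad(req, assoclen);
	aead_request_set_crypt(req, &sg, &sg, cryptlen, iv);

	ret = crypto_aead_encrypt(req);
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		wait_for_completion(&wait.done);
		ret = wait.err;
	}

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return ret;
}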
3260 
3261 struct caam_crypto_alg {
3262 	struct crypto_alg crypto_alg;
3263 	struct list_head entry;
3264 	struct caam_alg_entry caam;
3265 };
3266 
3267 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
3268 {
3269 	dma_addr_t dma_addr;
3270 
3271 	ctx->jrdev = caam_jr_alloc();
3272 	if (IS_ERR(ctx->jrdev)) {
3273 		pr_err("Job Ring Device allocation for transform failed\n");
3274 		return PTR_ERR(ctx->jrdev);
3275 	}
3276 
3277 	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3278 					offsetof(struct caam_ctx,
3279 						 sh_desc_enc_dma),
3280 					DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
3281 	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3282 		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3283 		caam_jr_free(ctx->jrdev);
3284 		return -ENOMEM;
3285 	}
3286 
3287 	ctx->sh_desc_enc_dma = dma_addr;
3288 	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3289 						   sh_desc_dec);
3290 	ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
3291 						      sh_desc_givenc);
3292 	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3293 
3294 	/* copy descriptor header template value */
3295 	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3296 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3297 
3298 	return 0;
3299 }
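
/*
 * Illustrative view of the single mapping above, assuming (as in this
 * driver's struct caam_ctx) that the shared descriptors and the key sit
 * back-to-back at the start of the context, so one
 * dma_map_single_attrs() covers all of them and each handle is just an
 * offset from the base:
 *
 *   dma_addr + 0                                         -> sh_desc_enc
 *   dma_addr + offsetof(struct caam_ctx, sh_desc_dec)    -> sh_desc_dec
 *   dma_addr + offsetof(struct caam_ctx, sh_desc_givenc) -> sh_desc_givenc
 *   dma_addr + offsetof(struct caam_ctx, key)            -> key
 *
 * DMA_ATTR_SKIP_CPU_SYNC avoids a sync at map time; the setkey and
 * shared-descriptor update paths are then expected to sync the ranges
 * they touch before handing descriptors to the device.
 */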
3300 
3301 static int caam_cra_init(struct crypto_tfm *tfm)
3302 {
3303 	struct crypto_alg *alg = tfm->__crt_alg;
3304 	struct caam_crypto_alg *caam_alg =
3305 		 container_of(alg, struct caam_crypto_alg, crypto_alg);
3306 	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
3307 
3308 	return caam_init_common(ctx, &caam_alg->caam);
3309 }
3310 
3311 static int caam_aead_init(struct crypto_aead *tfm)
3312 {
3313 	struct aead_alg *alg = crypto_aead_alg(tfm);
3314 	struct caam_aead_alg *caam_alg =
3315 		 container_of(alg, struct caam_aead_alg, aead);
3316 	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3317 
3318 	return caam_init_common(ctx, &caam_alg->caam);
3319 }
3320 
3321 static void caam_exit_common(struct caam_ctx *ctx)
3322 {
3323 	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3324 			       offsetof(struct caam_ctx, sh_desc_enc_dma),
3325 			       DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
3326 	caam_jr_free(ctx->jrdev);
3327 }
3328 
3329 static void caam_cra_exit(struct crypto_tfm *tfm)
3330 {
3331 	caam_exit_common(crypto_tfm_ctx(tfm));
3332 }
3333 
3334 static void caam_aead_exit(struct crypto_aead *tfm)
3335 {
3336 	caam_exit_common(crypto_aead_ctx(tfm));
3337 }
3338 
3339 static void __exit caam_algapi_exit(void)
3340 {
3342 	struct caam_crypto_alg *t_alg, *n;
3343 	int i;
3344 
3345 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3346 		struct caam_aead_alg *t_alg = driver_aeads + i;
3347 
3348 		if (t_alg->registered)
3349 			crypto_unregister_aead(&t_alg->aead);
3350 	}
3351 
3352 	if (!alg_list.next)
3353 		return;
3354 
3355 	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
3356 		crypto_unregister_alg(&t_alg->crypto_alg);
3357 		list_del(&t_alg->entry);
3358 		kfree(t_alg);
3359 	}
3360 }
3361 
3362 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
3363 					      *template)
3364 {
3365 	struct caam_crypto_alg *t_alg;
3366 	struct crypto_alg *alg;
3367 
3368 	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
3369 	if (!t_alg) {
3370 		pr_err("failed to allocate t_alg\n");
3371 		return ERR_PTR(-ENOMEM);
3372 	}
3373 
3374 	alg = &t_alg->crypto_alg;
3375 
3376 	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
3377 	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
3378 		 template->driver_name);
3379 	alg->cra_module = THIS_MODULE;
3380 	alg->cra_init = caam_cra_init;
3381 	alg->cra_exit = caam_cra_exit;
3382 	alg->cra_priority = CAAM_CRA_PRIORITY;
3383 	alg->cra_blocksize = template->blocksize;
3384 	alg->cra_alignmask = 0;
3385 	alg->cra_ctxsize = sizeof(struct caam_ctx);
3386 	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
3387 			 template->type;
3388 	switch (template->type) {
3389 	case CRYPTO_ALG_TYPE_GIVCIPHER:
3390 		alg->cra_type = &crypto_givcipher_type;
3391 		alg->cra_ablkcipher = template->template_ablkcipher;
3392 		break;
3393 	case CRYPTO_ALG_TYPE_ABLKCIPHER:
3394 		alg->cra_type = &crypto_ablkcipher_type;
3395 		alg->cra_ablkcipher = template->template_ablkcipher;
3396 		break;
3397 	}
3398 
3399 	t_alg->caam.class1_alg_type = template->class1_alg_type;
3400 	t_alg->caam.class2_alg_type = template->class2_alg_type;
3401 
3402 	return t_alg;
3403 }
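
/*
 * For the first driver_algs entry above, this yields (illustrative
 * summary): cra_name = "cbc(aes)", cra_driver_name = "cbc-aes-caam",
 * cra_priority = 3000 (CAAM_CRA_PRIORITY), cra_flags = CRYPTO_ALG_ASYNC |
 * CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_TYPE_GIVCIPHER, and
 * cra_type = &crypto_givcipher_type, so the crypto core prefers this
 * hardware-backed implementation over lower-priority software ones.
 */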
3404 
3405 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3406 {
3407 	struct aead_alg *alg = &t_alg->aead;
3408 
3409 	alg->base.cra_module = THIS_MODULE;
3410 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3411 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3412 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3413 
3414 	alg->init = caam_aead_init;
3415 	alg->exit = caam_aead_exit;
3416 }
3417 
3418 static int __init caam_algapi_init(void)
3419 {
3420 	struct device_node *dev_node;
3421 	struct platform_device *pdev;
3422 	struct device *ctrldev;
3423 	struct caam_drv_private *priv;
3424 	int i = 0, err = 0;
3425 	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
3426 	unsigned int md_limit = SHA512_DIGEST_SIZE;
3427 	bool registered = false;
3428 
3429 	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3430 	if (!dev_node) {
3431 		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3432 		if (!dev_node)
3433 			return -ENODEV;
3434 	}
3435 
3436 	pdev = of_find_device_by_node(dev_node);
3437 	if (!pdev) {
3438 		of_node_put(dev_node);
3439 		return -ENODEV;
3440 	}
3441 
3442 	ctrldev = &pdev->dev;
3443 	priv = dev_get_drvdata(ctrldev);
3444 	of_node_put(dev_node);
3445 
3446 	/*
3447 	 * If priv is NULL, it's probably because the caam driver wasn't
3448 	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3449 	 */
3450 	if (!priv)
3451 		return -ENODEV;
3452 
3454 	INIT_LIST_HEAD(&alg_list);
3455 
3456 	/*
3457 	 * Register crypto algorithms the device supports.
3458 	 * First, detect presence and attributes of DES, AES, and MD blocks.
3459 	 */
3460 	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3461 	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3462 	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
3463 	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
3464 	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3465 
3466 	/* If MD is present, limit digest size to SHA-256 on LP256 devices */
3467 	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
3468 		md_limit = SHA256_DIGEST_SIZE;
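
	/*
	 * Worked example (illustrative): on an LP256 part md_limit is
	 * SHA256_DIGEST_SIZE, so the AEAD loop below still registers the
	 * hmac(sha256) authenc entries but skips the hmac(sha384) and
	 * hmac(sha512) ones via the maxauthsize > md_limit test.
	 */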
3469 
3470 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3471 		struct caam_crypto_alg *t_alg;
3472 		struct caam_alg_template *alg = driver_algs + i;
3473 		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;
3474 
3475 		/* Skip DES algorithms if not supported by device */
3476 		if (!des_inst &&
3477 		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3478 		     (alg_sel == OP_ALG_ALGSEL_DES)))
3479 			continue;
3480 
3481 		/* Skip AES algorithms if not supported by device */
3482 		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3483 			continue;
3484 
3485 		/*
3486 		 * Skip AES modes (e.g. XTS) that are not
3487 		 * available on LP devices.
3488 		 */
3489 		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3490 			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
3491 			     OP_ALG_AAI_XTS)
3492 				continue;
3493 
3494 		t_alg = caam_alg_alloc(alg);
3495 		if (IS_ERR(t_alg)) {
3496 			err = PTR_ERR(t_alg);
3497 			pr_warn("%s alg allocation failed\n", alg->driver_name);
3498 			continue;
3499 		}
3500 
3501 		err = crypto_register_alg(&t_alg->crypto_alg);
3502 		if (err) {
3503 			pr_warn("%s alg registration failed\n",
3504 				t_alg->crypto_alg.cra_driver_name);
3505 			kfree(t_alg);
3506 			continue;
3507 		}
3508 
3509 		list_add_tail(&t_alg->entry, &alg_list);
3510 		registered = true;
3511 	}
3512 
3513 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3514 		struct caam_aead_alg *t_alg = driver_aeads + i;
3515 		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3516 				 OP_ALG_ALGSEL_MASK;
3517 		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3518 				 OP_ALG_ALGSEL_MASK;
3519 		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3520 
3521 		/* Skip DES algorithms if not supported by device */
3522 		if (!des_inst &&
3523 		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3524 		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3525 			continue;
3526 
3527 		/* Skip AES algorithms if not supported by device */
3528 		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3529 			continue;
3530 
3531 		/*
3532 		 * Skip AES modes (e.g. GCM) that are not
3533 		 * available on LP devices.
3534 		 */
3535 		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3536 			if (alg_aai == OP_ALG_AAI_GCM)
3537 				continue;
3538 
3539 		/*
3540 		 * Skip algorithms requiring message digests
3541 		 * if MD or MD size is not supported by device.
3542 		 */
3543 		if (c2_alg_sel &&
3544 		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
3545 			continue;
3546 
3547 		caam_aead_alg_init(t_alg);
3548 
3549 		err = crypto_register_aead(&t_alg->aead);
3550 		if (err) {
3551 			pr_warn("%s alg registration failed\n",
3552 				t_alg->aead.base.cra_driver_name);
3553 			continue;
3554 		}
3555 
3556 		t_alg->registered = true;
3557 		registered = true;
3558 	}
3559 
3560 	if (registered)
3561 		pr_info("caam algorithms registered in /proc/crypto\n");
3562 
3563 	return err;
3564 }
3565 
3566 module_init(caam_algapi_init);
3567 module_exit(caam_algapi_exit);
3568 
3569 MODULE_LICENSE("GPL");
3570 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
3571 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
3572