xref: /openbmc/linux/drivers/crypto/caam/caamalg.c (revision 6d99a79c)
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * caam - Freescale FSL CAAM support for crypto API
4  *
5  * Copyright 2008-2011 Freescale Semiconductor, Inc.
6  * Copyright 2016-2018 NXP
7  *
8  * Based on talitos crypto API driver.
9  *
10  * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11  *
12  * ---------------                     ---------------
13  * | JobDesc #1  |-------------------->|  ShareDesc  |
14  * | *(packet 1) |                     |   (PDB)     |
15  * ---------------      |------------->|  (hashKey)  |
16  *       .              |              | (cipherKey) |
17  *       .              |    |-------->| (operation) |
18  * ---------------      |    |         ---------------
19  * | JobDesc #2  |------|    |
20  * | *(packet 2) |           |
21  * ---------------           |
22  *       .                   |
23  *       .                   |
24  * ---------------           |
25  * | JobDesc #3  |------------
26  * | *(packet 3) |
27  * ---------------
28  *
29  * The SharedDesc never changes for a connection unless rekeyed, but
30  * each packet will likely be in a different place. So all we need
31  * to know to process the packet is where the input is, where the
32  * output goes, and what context we want to process with. Context is
33  * in the SharedDesc, packet references in the JobDesc.
34  *
35  * So, a job desc looks like:
36  *
37  * ---------------------
38  * | Header            |
39  * | ShareDesc Pointer |
40  * | SEQ_OUT_PTR       |
41  * | (output buffer)   |
42  * | (output length)   |
43  * | SEQ_IN_PTR        |
44  * | (input buffer)    |
45  * | (input length)    |
46  * ---------------------
47  */
48 
49 #include "compat.h"
50 
51 #include "regs.h"
52 #include "intern.h"
53 #include "desc_constr.h"
54 #include "jr.h"
55 #include "error.h"
56 #include "sg_sw_sec4.h"
57 #include "key_gen.h"
58 #include "caamalg_desc.h"
59 
60 /*
61  * crypto alg
62  */
63 #define CAAM_CRA_PRIORITY		3000
64 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
65 #define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
66 					 CTR_RFC3686_NONCE_SIZE + \
67 					 SHA512_DIGEST_SIZE * 2)
68 
69 #define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
70 #define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
71 					 CAAM_CMD_SZ * 4)
72 #define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
73 					 CAAM_CMD_SZ * 5)
74 
75 #define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
76 #define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
77 
78 #ifdef DEBUG
79 /* for print_hex_dumps with line references */
80 #define debug(format, arg...) printk(format, arg)
81 #else
82 #define debug(format, arg...)
83 #endif
84 
/*
 * caam_alg_entry - per-algorithm CAAM template parameters
 * @class1_alg_type: OP_ALG_* selector for the class 1 (cipher) CHA
 * @class2_alg_type: OP_ALG_* selector for the class 2 (auth/hash) CHA
 * @rfc3686: true for CTR variants that carry an RFC3686 nonce in the key
 * @geniv: true when the algorithm generates the IV itself (givencrypt path)
 */
struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
};
91 
/*
 * caam_aead_alg - AEAD algorithm instance backed by CAAM
 * @aead: generic crypto API AEAD algorithm definition
 * @caam: CAAM-specific template parameters
 * @registered: set once the algorithm has been registered with the crypto API
 */
struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};
97 
/*
 * caam_skcipher_alg - skcipher algorithm instance backed by CAAM
 * @skcipher: generic crypto API skcipher algorithm definition
 * @caam: CAAM-specific template parameters
 * @registered: set once the algorithm has been registered with the crypto API
 */
struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};
103 
/*
 * per-session context
 *
 * Shared descriptors and key material live in the context itself and are
 * DMA-mapped once at tfm init; updates are pushed to the device with
 * dma_sync_single_for_device() using the corresponding *_dma handles.
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];	/* encrypt shared descriptor */
	u32 sh_desc_dec[DESC_MAX_USED_LEN];	/* decrypt shared descriptor */
	u8 key[CAAM_MAX_KEY_SIZE];		/* [split auth key][cipher key][nonce] */
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;		/* DMA direction for the syncs above */
	struct device *jrdev;			/* job ring device */
	struct alginfo adata;			/* authentication (class 2) key info */
	struct alginfo cdata;			/* cipher (class 1) key info */
	unsigned int authsize;			/* ICV length in bytes */
};
120 
/*
 * aead_null_set_sh_desc - build shared descriptors for authenc(hmac,NULL)
 * @aead: aead transform whose context holds the split auth key and authsize
 *
 * Constructs the encrypt and decrypt shared descriptors for the
 * authentication-only (NULL cipher) case and syncs them to the device.
 * The auth key is inlined into the descriptor when it fits, otherwise it
 * is referenced by DMA address. Always returns 0.
 */
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	/* descriptor space left after job-descriptor I/O commands and key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
170 
/*
 * aead_set_sh_desc - (re)build shared descriptors for authenc-style AEADs
 * @aead: aead transform whose context holds the keys and authsize
 *
 * Builds the encrypt, decrypt and - for geniv algorithms - givencrypt
 * shared descriptors. For each descriptor, desc_inline_query() decides
 * independently whether the split auth key and/or the cipher key can be
 * inlined, so that job + shared descriptor still fit in the 64-word
 * descriptor buffer; otherwise the key is referenced by DMA address.
 *
 * Returns 0 on success (including the no-op cases where authsize or the
 * keys are not yet set), -EINVAL if a descriptor cannot be made to fit.
 */
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	/* descriptors cannot be built before the ICV length is known */
	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 * The nonce is stored as the last CTR_RFC3686_NONCE_SIZE bytes of
	 * the cipher key material in ctx->key.
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/* [0] = auth split key length, [1] = cipher key length */
	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	/* bit 0 of inl_mask: auth key can be inlined */
	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	/* bit 1 of inl_mask: cipher key can be inlined */
	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor (overwrites the encrypt one) */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}
318 
319 static int aead_setauthsize(struct crypto_aead *authenc,
320 				    unsigned int authsize)
321 {
322 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
323 
324 	ctx->authsize = authsize;
325 	aead_set_sh_desc(authenc);
326 
327 	return 0;
328 }
329 
/*
 * gcm_set_sh_desc - build shared descriptors for AES-GCM
 * @aead: aead transform whose context holds the AES key and authsize
 *
 * Constructs the encrypt and decrypt shared descriptors, inlining the key
 * when it fits alongside the job descriptor in the 64-word descriptor
 * buffer, and syncs them to the device. A no-op (returning 0) until both
 * the key and the ICV length have been set.
 */
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	/* descriptor space left after job-descriptor I/O commands and key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
379 
380 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
381 {
382 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
383 
384 	ctx->authsize = authsize;
385 	gcm_set_sh_desc(authenc);
386 
387 	return 0;
388 }
389 
/*
 * rfc4106_set_sh_desc - build shared descriptors for RFC4106 (GCM for ESP)
 * @aead: aead transform whose context holds the AES key (minus salt) and
 *	  authsize
 *
 * Same inline-vs-DMA key decision and device sync as gcm_set_sh_desc(),
 * using the RFC4106 descriptor constructors. No-op (returning 0) until
 * both key and ICV length are set.
 */
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	/* descriptor space left after job-descriptor I/O commands and key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
441 
442 static int rfc4106_setauthsize(struct crypto_aead *authenc,
443 			       unsigned int authsize)
444 {
445 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
446 
447 	ctx->authsize = authsize;
448 	rfc4106_set_sh_desc(authenc);
449 
450 	return 0;
451 }
452 
/*
 * rfc4543_set_sh_desc - build shared descriptors for RFC4543 (GMAC for ESP)
 * @aead: aead transform whose context holds the AES key (minus salt) and
 *	  authsize
 *
 * Same inline-vs-DMA key decision and device sync as gcm_set_sh_desc(),
 * using the RFC4543 descriptor constructors. No-op (returning 0) until
 * both key and ICV length are set.
 */
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	/* descriptor space left after job-descriptor I/O commands and key */
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
504 
505 static int rfc4543_setauthsize(struct crypto_aead *authenc,
506 			       unsigned int authsize)
507 {
508 	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
509 
510 	ctx->authsize = authsize;
511 	rfc4543_set_sh_desc(authenc);
512 
513 	return 0;
514 }
515 
/*
 * aead_setkey - set the combined auth+cipher key for authenc-style AEADs
 * @aead: aead transform
 * @key: crypto_authenc-encoded key blob ({len, auth key, enc key})
 * @keylen: total blob length
 *
 * Splits the blob with crypto_authenc_extractkeys(). On CAAM era >= 6 the
 * raw auth key is stored and the split (ipad/opad) key is derived by the
 * DKP protocol inside the shared descriptor; on older eras the split key
 * is generated in software via gen_split_key(). The cipher key is always
 * appended after the (padded) auth key in ctx->key. Returns 0 on success
 * or -EINVAL (with CRYPTO_TFM_RES_BAD_KEY_LEN set) on a malformed key.
 */
static int aead_setkey(struct crypto_aead *aead,
			       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		/* both keys must fit in the fixed-size ctx->key buffer */
		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	/* pre-era-6: derive the ipad/opad split key in software */
	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	/* wipe the plaintext key material from the stack */
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
583 
584 static int gcm_setkey(struct crypto_aead *aead,
585 		      const u8 *key, unsigned int keylen)
586 {
587 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
588 	struct device *jrdev = ctx->jrdev;
589 
590 #ifdef DEBUG
591 	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
592 		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
593 #endif
594 
595 	memcpy(ctx->key, key, keylen);
596 	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
597 	ctx->cdata.keylen = keylen;
598 
599 	return gcm_set_sh_desc(aead);
600 }
601 
602 static int rfc4106_setkey(struct crypto_aead *aead,
603 			  const u8 *key, unsigned int keylen)
604 {
605 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
606 	struct device *jrdev = ctx->jrdev;
607 
608 	if (keylen < 4)
609 		return -EINVAL;
610 
611 #ifdef DEBUG
612 	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
613 		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
614 #endif
615 
616 	memcpy(ctx->key, key, keylen);
617 
618 	/*
619 	 * The last four bytes of the key material are used as the salt value
620 	 * in the nonce. Update the AES key length.
621 	 */
622 	ctx->cdata.keylen = keylen - 4;
623 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
624 				   ctx->dir);
625 	return rfc4106_set_sh_desc(aead);
626 }
627 
628 static int rfc4543_setkey(struct crypto_aead *aead,
629 			  const u8 *key, unsigned int keylen)
630 {
631 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
632 	struct device *jrdev = ctx->jrdev;
633 
634 	if (keylen < 4)
635 		return -EINVAL;
636 
637 #ifdef DEBUG
638 	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
639 		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
640 #endif
641 
642 	memcpy(ctx->key, key, keylen);
643 
644 	/*
645 	 * The last four bytes of the key material are used as the salt value
646 	 * in the nonce. Update the AES key length.
647 	 */
648 	ctx->cdata.keylen = keylen - 4;
649 	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
650 				   ctx->dir);
651 	return rfc4543_set_sh_desc(aead);
652 }
653 
/*
 * skcipher_setkey - set the key for CBC/CTR/ECB-style skciphers
 * @skcipher: skcipher transform
 * @key: raw key (for RFC3686 CTR: key followed by the 4-byte nonce)
 * @keylen: total key length in bytes
 *
 * Key length bounds are enforced by the crypto core via the algorithm's
 * min_keysize/max_keysize, so no length check is needed here. The key is
 * always inlined into the shared descriptors, which are then rebuilt and
 * synced to the device. Always returns 0.
 */
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 * The nonce stays in the key buffer; only the AES-key portion is
	 * counted in cdata.keylen.
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
711 
712 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
713 			       unsigned int keylen)
714 {
715 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
716 	struct device *jrdev = ctx->jrdev;
717 	u32 *desc;
718 
719 	if (keylen != 2 * AES_MIN_KEY_SIZE  && keylen != 2 * AES_MAX_KEY_SIZE) {
720 		crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
721 		dev_err(jrdev, "key size mismatch\n");
722 		return -EINVAL;
723 	}
724 
725 	ctx->cdata.keylen = keylen;
726 	ctx->cdata.key_virt = key;
727 	ctx->cdata.key_inline = true;
728 
729 	/* xts_skcipher_encrypt shared descriptor */
730 	desc = ctx->sh_desc_enc;
731 	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
732 	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
733 				   desc_bytes(desc), ctx->dir);
734 
735 	/* xts_skcipher_decrypt shared descriptor */
736 	desc = ctx->sh_desc_dec;
737 	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
738 	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
739 				   desc_bytes(desc), ctx->dir);
740 
741 	return 0;
742 }
743 
/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *
 * Allocated per request; freed in the job-ring completion callback.
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};
761 
762 /*
763  * skcipher_edesc - s/w-extended skcipher descriptor
764  * @src_nents: number of segments in input s/w scatterlist
765  * @dst_nents: number of segments in output s/w scatterlist
766  * @iv_dma: dma address of iv for checking continuity and link table
767  * @sec4_sg_bytes: length of dma mapped sec4_sg space
768  * @sec4_sg_dma: bus physical mapped address of h/w link table
769  * @sec4_sg: pointer to h/w link table
770  * @hw_desc: the h/w job descriptor followed by any referenced link tables
771  *	     and IV
772  */
773 struct skcipher_edesc {
774 	int src_nents;
775 	int dst_nents;
776 	dma_addr_t iv_dma;
777 	int sec4_sg_bytes;
778 	dma_addr_t sec4_sg_dma;
779 	struct sec4_sg_entry *sec4_sg;
780 	u32 hw_desc[0];
781 };
782 
783 static void caam_unmap(struct device *dev, struct scatterlist *src,
784 		       struct scatterlist *dst, int src_nents,
785 		       int dst_nents,
786 		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
787 		       int sec4_sg_bytes)
788 {
789 	if (dst != src) {
790 		if (src_nents)
791 			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
792 		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
793 	} else {
794 		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
795 	}
796 
797 	if (iv_dma)
798 		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
799 	if (sec4_sg_bytes)
800 		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
801 				 DMA_TO_DEVICE);
802 }
803 
/* Release all DMA resources held by an aead extended descriptor (no IV). */
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
812 
/*
 * Release all DMA resources held by a skcipher extended descriptor,
 * including the separately-mapped IV.
 */
static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
824 
825 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
826 				   void *context)
827 {
828 	struct aead_request *req = context;
829 	struct aead_edesc *edesc;
830 
831 #ifdef DEBUG
832 	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
833 #endif
834 
835 	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
836 
837 	if (err)
838 		caam_jr_strstatus(jrdev, err);
839 
840 	aead_unmap(jrdev, edesc, req);
841 
842 	kfree(edesc);
843 
844 	aead_request_complete(req, err);
845 }
846 
847 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
848 				   void *context)
849 {
850 	struct aead_request *req = context;
851 	struct aead_edesc *edesc;
852 
853 #ifdef DEBUG
854 	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
855 #endif
856 
857 	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
858 
859 	if (err)
860 		caam_jr_strstatus(jrdev, err);
861 
862 	aead_unmap(jrdev, edesc, req);
863 
864 	/*
865 	 * verify hw auth check passed else return -EBADMSG
866 	 */
867 	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
868 		err = -EBADMSG;
869 
870 	kfree(edesc);
871 
872 	aead_request_complete(req, err);
873 }
874 
875 static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
876 				  void *context)
877 {
878 	struct skcipher_request *req = context;
879 	struct skcipher_edesc *edesc;
880 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
881 	int ivsize = crypto_skcipher_ivsize(skcipher);
882 
883 #ifdef DEBUG
884 	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
885 #endif
886 
887 	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
888 
889 	if (err)
890 		caam_jr_strstatus(jrdev, err);
891 
892 #ifdef DEBUG
893 	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
894 		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
895 		       edesc->src_nents > 1 ? 100 : ivsize, 1);
896 #endif
897 	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
898 		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
899 		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
900 
901 	skcipher_unmap(jrdev, edesc, req);
902 
903 	/*
904 	 * The crypto API expects us to set the IV (req->iv) to the last
905 	 * ciphertext block. This is used e.g. by the CTS mode.
906 	 */
907 	scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen - ivsize,
908 				 ivsize, 0);
909 
910 	kfree(edesc);
911 
912 	skcipher_request_complete(req, err);
913 }
914 
915 static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
916 				  void *context)
917 {
918 	struct skcipher_request *req = context;
919 	struct skcipher_edesc *edesc;
920 #ifdef DEBUG
921 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
922 	int ivsize = crypto_skcipher_ivsize(skcipher);
923 
924 	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
925 #endif
926 
927 	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
928 	if (err)
929 		caam_jr_strstatus(jrdev, err);
930 
931 #ifdef DEBUG
932 	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
933 		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
934 #endif
935 	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
936 		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
937 		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
938 
939 	skcipher_unmap(jrdev, edesc, req);
940 	kfree(edesc);
941 
942 	skcipher_request_complete(req, err);
943 }
944 
/*
 * init_aead_job - fill in the aead job descriptor
 * @req: the aead request
 * @edesc: extended descriptor holding the h/w descriptor and link tables
 * @all_contig: true if the source data is contiguous (no link table needed
 *		for the input)
 * @encrypt: selects the encrypt or decrypt shared descriptor
 *
 * Points the job descriptor at the proper shared descriptor and appends
 * SEQ IN/OUT pointers. Output length is input length plus the ICV for
 * encryption, minus the ICV for decryption.
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		/* contiguous input: point straight at the scatterlist entry */
		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		/* input goes through the sec4 link table */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	/* in-place by default; overridden below for distinct dst */
	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (edesc->dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
		} else {
			/* dst link table entries follow the src entries */
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}
1003 
/*
 * init_gcm_job - fill in the GCM-specific part of the job descriptor
 * @req: the aead request
 * @edesc: extended descriptor holding the h/w descriptor
 * @all_contig: true if the source data is contiguous
 * @encrypt: true for encryption jobs
 *
 * Builds on init_aead_job(), then loads assoclen into REG3 and appends
 * the IV (plus salt for non-generic, i.e. RFC4106-style, GCM) as
 * immediate FIFO-load data.
 */
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
			 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}
1033 
/*
 * init_authenc_job - append the authenc-specific commands to a job descriptor
 * @req: aead request
 * @edesc: extended descriptor prepared by aead_edesc_alloc()
 * @all_contig: true if the source data is a single contiguous segment
 * @encrypt: true for encryption, false for decryption
 *
 * Builds the common AEAD part via init_aead_job(), publishes the
 * associated-data length to the engine (REG3 or DPOVRD depending on
 * CAAM era) and, when needed, loads the IV into CONTEXT1 at the offset
 * required by the cipher mode.
 */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	/*
	 * Load the IV unless the shared descriptor generates it itself
	 * (geniv decrypt path); rfc3686 encrypt still supplies it here.
	 */
	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}
1082 
1083 /*
1084  * Fill in skcipher job descriptor
1085  */
1086 static void init_skcipher_job(struct skcipher_request *req,
1087 			      struct skcipher_edesc *edesc,
1088 			      const bool encrypt)
1089 {
1090 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1091 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1092 	int ivsize = crypto_skcipher_ivsize(skcipher);
1093 	u32 *desc = edesc->hw_desc;
1094 	u32 *sh_desc;
1095 	u32 out_options = 0;
1096 	dma_addr_t dst_dma, ptr;
1097 	int len;
1098 
1099 #ifdef DEBUG
1100 	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
1101 		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1102 	pr_err("asked=%d, cryptlen%d\n",
1103 	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
1104 #endif
1105 	caam_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__)": ",
1106 		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1107 		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
1108 
1109 	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1110 	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1111 
1112 	len = desc_len(sh_desc);
1113 	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1114 
1115 	append_seq_in_ptr(desc, edesc->sec4_sg_dma, req->cryptlen + ivsize,
1116 			  LDST_SGF);
1117 
1118 	if (likely(req->src == req->dst)) {
1119 		dst_dma = edesc->sec4_sg_dma + sizeof(struct sec4_sg_entry);
1120 		out_options = LDST_SGF;
1121 	} else {
1122 		if (edesc->dst_nents == 1) {
1123 			dst_dma = sg_dma_address(req->dst);
1124 		} else {
1125 			dst_dma = edesc->sec4_sg_dma + (edesc->src_nents + 1) *
1126 				  sizeof(struct sec4_sg_entry);
1127 			out_options = LDST_SGF;
1128 		}
1129 	}
1130 	append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
1131 }
1132 
1133 /*
1134  * allocate and map the aead extended descriptor
1135  */
1136 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1137 					   int desc_bytes, bool *all_contig_ptr,
1138 					   bool encrypt)
1139 {
1140 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1141 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1142 	struct device *jrdev = ctx->jrdev;
1143 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1144 		       GFP_KERNEL : GFP_ATOMIC;
1145 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1146 	struct aead_edesc *edesc;
1147 	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1148 	unsigned int authsize = ctx->authsize;
1149 
1150 	if (unlikely(req->dst != req->src)) {
1151 		src_nents = sg_nents_for_len(req->src, req->assoclen +
1152 					     req->cryptlen);
1153 		if (unlikely(src_nents < 0)) {
1154 			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1155 				req->assoclen + req->cryptlen);
1156 			return ERR_PTR(src_nents);
1157 		}
1158 
1159 		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
1160 					     req->cryptlen +
1161 						(encrypt ? authsize :
1162 							   (-authsize)));
1163 		if (unlikely(dst_nents < 0)) {
1164 			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1165 				req->assoclen + req->cryptlen +
1166 				(encrypt ? authsize : (-authsize)));
1167 			return ERR_PTR(dst_nents);
1168 		}
1169 	} else {
1170 		src_nents = sg_nents_for_len(req->src, req->assoclen +
1171 					     req->cryptlen +
1172 					     (encrypt ? authsize : 0));
1173 		if (unlikely(src_nents < 0)) {
1174 			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1175 				req->assoclen + req->cryptlen +
1176 				(encrypt ? authsize : 0));
1177 			return ERR_PTR(src_nents);
1178 		}
1179 	}
1180 
1181 	if (likely(req->src == req->dst)) {
1182 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1183 					      DMA_BIDIRECTIONAL);
1184 		if (unlikely(!mapped_src_nents)) {
1185 			dev_err(jrdev, "unable to map source\n");
1186 			return ERR_PTR(-ENOMEM);
1187 		}
1188 	} else {
1189 		/* Cover also the case of null (zero length) input data */
1190 		if (src_nents) {
1191 			mapped_src_nents = dma_map_sg(jrdev, req->src,
1192 						      src_nents, DMA_TO_DEVICE);
1193 			if (unlikely(!mapped_src_nents)) {
1194 				dev_err(jrdev, "unable to map source\n");
1195 				return ERR_PTR(-ENOMEM);
1196 			}
1197 		} else {
1198 			mapped_src_nents = 0;
1199 		}
1200 
1201 		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1202 					      DMA_FROM_DEVICE);
1203 		if (unlikely(!mapped_dst_nents)) {
1204 			dev_err(jrdev, "unable to map destination\n");
1205 			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1206 			return ERR_PTR(-ENOMEM);
1207 		}
1208 	}
1209 
1210 	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1211 	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1212 	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1213 
1214 	/* allocate space for base edesc and hw desc commands, link tables */
1215 	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1216 			GFP_DMA | flags);
1217 	if (!edesc) {
1218 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1219 			   0, 0, 0);
1220 		return ERR_PTR(-ENOMEM);
1221 	}
1222 
1223 	edesc->src_nents = src_nents;
1224 	edesc->dst_nents = dst_nents;
1225 	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1226 			 desc_bytes;
1227 	*all_contig_ptr = !(mapped_src_nents > 1);
1228 
1229 	sec4_sg_index = 0;
1230 	if (mapped_src_nents > 1) {
1231 		sg_to_sec4_sg_last(req->src, mapped_src_nents,
1232 				   edesc->sec4_sg + sec4_sg_index, 0);
1233 		sec4_sg_index += mapped_src_nents;
1234 	}
1235 	if (mapped_dst_nents > 1) {
1236 		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1237 				   edesc->sec4_sg + sec4_sg_index, 0);
1238 	}
1239 
1240 	if (!sec4_sg_bytes)
1241 		return edesc;
1242 
1243 	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1244 					    sec4_sg_bytes, DMA_TO_DEVICE);
1245 	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1246 		dev_err(jrdev, "unable to map S/G table\n");
1247 		aead_unmap(jrdev, edesc, req);
1248 		kfree(edesc);
1249 		return ERR_PTR(-ENOMEM);
1250 	}
1251 
1252 	edesc->sec4_sg_bytes = sec4_sg_bytes;
1253 
1254 	return edesc;
1255 }
1256 
1257 static int gcm_encrypt(struct aead_request *req)
1258 {
1259 	struct aead_edesc *edesc;
1260 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1261 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1262 	struct device *jrdev = ctx->jrdev;
1263 	bool all_contig;
1264 	u32 *desc;
1265 	int ret = 0;
1266 
1267 	/* allocate extended descriptor */
1268 	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
1269 	if (IS_ERR(edesc))
1270 		return PTR_ERR(edesc);
1271 
1272 	/* Create and submit job descriptor */
1273 	init_gcm_job(req, edesc, all_contig, true);
1274 #ifdef DEBUG
1275 	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1276 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1277 		       desc_bytes(edesc->hw_desc), 1);
1278 #endif
1279 
1280 	desc = edesc->hw_desc;
1281 	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1282 	if (!ret) {
1283 		ret = -EINPROGRESS;
1284 	} else {
1285 		aead_unmap(jrdev, edesc, req);
1286 		kfree(edesc);
1287 	}
1288 
1289 	return ret;
1290 }
1291 
1292 static int ipsec_gcm_encrypt(struct aead_request *req)
1293 {
1294 	if (req->assoclen < 8)
1295 		return -EINVAL;
1296 
1297 	return gcm_encrypt(req);
1298 }
1299 
1300 static int aead_encrypt(struct aead_request *req)
1301 {
1302 	struct aead_edesc *edesc;
1303 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1304 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1305 	struct device *jrdev = ctx->jrdev;
1306 	bool all_contig;
1307 	u32 *desc;
1308 	int ret = 0;
1309 
1310 	/* allocate extended descriptor */
1311 	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1312 				 &all_contig, true);
1313 	if (IS_ERR(edesc))
1314 		return PTR_ERR(edesc);
1315 
1316 	/* Create and submit job descriptor */
1317 	init_authenc_job(req, edesc, all_contig, true);
1318 #ifdef DEBUG
1319 	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1320 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1321 		       desc_bytes(edesc->hw_desc), 1);
1322 #endif
1323 
1324 	desc = edesc->hw_desc;
1325 	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1326 	if (!ret) {
1327 		ret = -EINPROGRESS;
1328 	} else {
1329 		aead_unmap(jrdev, edesc, req);
1330 		kfree(edesc);
1331 	}
1332 
1333 	return ret;
1334 }
1335 
1336 static int gcm_decrypt(struct aead_request *req)
1337 {
1338 	struct aead_edesc *edesc;
1339 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1340 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1341 	struct device *jrdev = ctx->jrdev;
1342 	bool all_contig;
1343 	u32 *desc;
1344 	int ret = 0;
1345 
1346 	/* allocate extended descriptor */
1347 	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1348 	if (IS_ERR(edesc))
1349 		return PTR_ERR(edesc);
1350 
1351 	/* Create and submit job descriptor*/
1352 	init_gcm_job(req, edesc, all_contig, false);
1353 #ifdef DEBUG
1354 	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1355 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1356 		       desc_bytes(edesc->hw_desc), 1);
1357 #endif
1358 
1359 	desc = edesc->hw_desc;
1360 	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1361 	if (!ret) {
1362 		ret = -EINPROGRESS;
1363 	} else {
1364 		aead_unmap(jrdev, edesc, req);
1365 		kfree(edesc);
1366 	}
1367 
1368 	return ret;
1369 }
1370 
1371 static int ipsec_gcm_decrypt(struct aead_request *req)
1372 {
1373 	if (req->assoclen < 8)
1374 		return -EINVAL;
1375 
1376 	return gcm_decrypt(req);
1377 }
1378 
1379 static int aead_decrypt(struct aead_request *req)
1380 {
1381 	struct aead_edesc *edesc;
1382 	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1383 	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1384 	struct device *jrdev = ctx->jrdev;
1385 	bool all_contig;
1386 	u32 *desc;
1387 	int ret = 0;
1388 
1389 	caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
1390 		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1391 		     req->assoclen + req->cryptlen, 1);
1392 
1393 	/* allocate extended descriptor */
1394 	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1395 				 &all_contig, false);
1396 	if (IS_ERR(edesc))
1397 		return PTR_ERR(edesc);
1398 
1399 	/* Create and submit job descriptor*/
1400 	init_authenc_job(req, edesc, all_contig, false);
1401 #ifdef DEBUG
1402 	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1403 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1404 		       desc_bytes(edesc->hw_desc), 1);
1405 #endif
1406 
1407 	desc = edesc->hw_desc;
1408 	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1409 	if (!ret) {
1410 		ret = -EINPROGRESS;
1411 	} else {
1412 		aead_unmap(jrdev, edesc, req);
1413 		kfree(edesc);
1414 	}
1415 
1416 	return ret;
1417 }
1418 
1419 /*
1420  * allocate and map the skcipher extended descriptor for skcipher
1421  */
1422 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1423 						   int desc_bytes)
1424 {
1425 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1426 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1427 	struct device *jrdev = ctx->jrdev;
1428 	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1429 		       GFP_KERNEL : GFP_ATOMIC;
1430 	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1431 	struct skcipher_edesc *edesc;
1432 	dma_addr_t iv_dma;
1433 	u8 *iv;
1434 	int ivsize = crypto_skcipher_ivsize(skcipher);
1435 	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1436 
1437 	src_nents = sg_nents_for_len(req->src, req->cryptlen);
1438 	if (unlikely(src_nents < 0)) {
1439 		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1440 			req->cryptlen);
1441 		return ERR_PTR(src_nents);
1442 	}
1443 
1444 	if (req->dst != req->src) {
1445 		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1446 		if (unlikely(dst_nents < 0)) {
1447 			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1448 				req->cryptlen);
1449 			return ERR_PTR(dst_nents);
1450 		}
1451 	}
1452 
1453 	if (likely(req->src == req->dst)) {
1454 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1455 					      DMA_BIDIRECTIONAL);
1456 		if (unlikely(!mapped_src_nents)) {
1457 			dev_err(jrdev, "unable to map source\n");
1458 			return ERR_PTR(-ENOMEM);
1459 		}
1460 	} else {
1461 		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1462 					      DMA_TO_DEVICE);
1463 		if (unlikely(!mapped_src_nents)) {
1464 			dev_err(jrdev, "unable to map source\n");
1465 			return ERR_PTR(-ENOMEM);
1466 		}
1467 
1468 		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1469 					      DMA_FROM_DEVICE);
1470 		if (unlikely(!mapped_dst_nents)) {
1471 			dev_err(jrdev, "unable to map destination\n");
1472 			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1473 			return ERR_PTR(-ENOMEM);
1474 		}
1475 	}
1476 
1477 	sec4_sg_ents = 1 + mapped_src_nents;
1478 	dst_sg_idx = sec4_sg_ents;
1479 	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1480 	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1481 
1482 	/*
1483 	 * allocate space for base edesc and hw desc commands, link tables, IV
1484 	 */
1485 	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
1486 			GFP_DMA | flags);
1487 	if (!edesc) {
1488 		dev_err(jrdev, "could not allocate extended descriptor\n");
1489 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1490 			   0, 0, 0);
1491 		return ERR_PTR(-ENOMEM);
1492 	}
1493 
1494 	edesc->src_nents = src_nents;
1495 	edesc->dst_nents = dst_nents;
1496 	edesc->sec4_sg_bytes = sec4_sg_bytes;
1497 	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1498 						  desc_bytes);
1499 
1500 	/* Make sure IV is located in a DMAable area */
1501 	iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
1502 	memcpy(iv, req->iv, ivsize);
1503 
1504 	iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
1505 	if (dma_mapping_error(jrdev, iv_dma)) {
1506 		dev_err(jrdev, "unable to map IV\n");
1507 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1508 			   0, 0, 0);
1509 		kfree(edesc);
1510 		return ERR_PTR(-ENOMEM);
1511 	}
1512 
1513 	dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1514 	sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg + 1, 0);
1515 
1516 	if (mapped_dst_nents > 1) {
1517 		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1518 				   edesc->sec4_sg + dst_sg_idx, 0);
1519 	}
1520 
1521 	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1522 					    sec4_sg_bytes, DMA_TO_DEVICE);
1523 	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1524 		dev_err(jrdev, "unable to map S/G table\n");
1525 		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1526 			   iv_dma, ivsize, 0, 0);
1527 		kfree(edesc);
1528 		return ERR_PTR(-ENOMEM);
1529 	}
1530 
1531 	edesc->iv_dma = iv_dma;
1532 
1533 #ifdef DEBUG
1534 	print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ",
1535 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1536 		       sec4_sg_bytes, 1);
1537 #endif
1538 
1539 	return edesc;
1540 }
1541 
1542 static int skcipher_encrypt(struct skcipher_request *req)
1543 {
1544 	struct skcipher_edesc *edesc;
1545 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1546 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1547 	struct device *jrdev = ctx->jrdev;
1548 	u32 *desc;
1549 	int ret = 0;
1550 
1551 	/* allocate extended descriptor */
1552 	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1553 	if (IS_ERR(edesc))
1554 		return PTR_ERR(edesc);
1555 
1556 	/* Create and submit job descriptor*/
1557 	init_skcipher_job(req, edesc, true);
1558 #ifdef DEBUG
1559 	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
1560 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1561 		       desc_bytes(edesc->hw_desc), 1);
1562 #endif
1563 	desc = edesc->hw_desc;
1564 	ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);
1565 
1566 	if (!ret) {
1567 		ret = -EINPROGRESS;
1568 	} else {
1569 		skcipher_unmap(jrdev, edesc, req);
1570 		kfree(edesc);
1571 	}
1572 
1573 	return ret;
1574 }
1575 
1576 static int skcipher_decrypt(struct skcipher_request *req)
1577 {
1578 	struct skcipher_edesc *edesc;
1579 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1580 	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1581 	int ivsize = crypto_skcipher_ivsize(skcipher);
1582 	struct device *jrdev = ctx->jrdev;
1583 	u32 *desc;
1584 	int ret = 0;
1585 
1586 	/* allocate extended descriptor */
1587 	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1588 	if (IS_ERR(edesc))
1589 		return PTR_ERR(edesc);
1590 
1591 	/*
1592 	 * The crypto API expects us to set the IV (req->iv) to the last
1593 	 * ciphertext block.
1594 	 */
1595 	scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen - ivsize,
1596 				 ivsize, 0);
1597 
1598 	/* Create and submit job descriptor*/
1599 	init_skcipher_job(req, edesc, false);
1600 	desc = edesc->hw_desc;
1601 #ifdef DEBUG
1602 	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
1603 		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1604 		       desc_bytes(edesc->hw_desc), 1);
1605 #endif
1606 
1607 	ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
1608 	if (!ret) {
1609 		ret = -EINPROGRESS;
1610 	} else {
1611 		skcipher_unmap(jrdev, edesc, req);
1612 		kfree(edesc);
1613 	}
1614 
1615 	return ret;
1616 }
1617 
/*
 * Table of skcipher algorithms registered by this driver: AES in CBC,
 * CTR (plain and rfc3686) and XTS modes, plus DES and 3DES in CBC mode.
 * Each entry pairs the crypto API skcipher definition with the CAAM
 * class 1 algorithm/AAI selector for that cipher and mode.
 */
static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		/* stream-cipher semantics: blocksize 1, chunksize = AES block */
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		/* rfc3686: key carries an extra nonce; shorter IV */
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		/* XTS uses two AES keys, hence the doubled key sizes */
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
};
1725 
1726 static struct caam_aead_alg driver_aeads[] = {
1727 	{
1728 		.aead = {
1729 			.base = {
1730 				.cra_name = "rfc4106(gcm(aes))",
1731 				.cra_driver_name = "rfc4106-gcm-aes-caam",
1732 				.cra_blocksize = 1,
1733 			},
1734 			.setkey = rfc4106_setkey,
1735 			.setauthsize = rfc4106_setauthsize,
1736 			.encrypt = ipsec_gcm_encrypt,
1737 			.decrypt = ipsec_gcm_decrypt,
1738 			.ivsize = GCM_RFC4106_IV_SIZE,
1739 			.maxauthsize = AES_BLOCK_SIZE,
1740 		},
1741 		.caam = {
1742 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1743 		},
1744 	},
1745 	{
1746 		.aead = {
1747 			.base = {
1748 				.cra_name = "rfc4543(gcm(aes))",
1749 				.cra_driver_name = "rfc4543-gcm-aes-caam",
1750 				.cra_blocksize = 1,
1751 			},
1752 			.setkey = rfc4543_setkey,
1753 			.setauthsize = rfc4543_setauthsize,
1754 			.encrypt = ipsec_gcm_encrypt,
1755 			.decrypt = ipsec_gcm_decrypt,
1756 			.ivsize = GCM_RFC4543_IV_SIZE,
1757 			.maxauthsize = AES_BLOCK_SIZE,
1758 		},
1759 		.caam = {
1760 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1761 		},
1762 	},
1763 	/* Galois Counter Mode */
1764 	{
1765 		.aead = {
1766 			.base = {
1767 				.cra_name = "gcm(aes)",
1768 				.cra_driver_name = "gcm-aes-caam",
1769 				.cra_blocksize = 1,
1770 			},
1771 			.setkey = gcm_setkey,
1772 			.setauthsize = gcm_setauthsize,
1773 			.encrypt = gcm_encrypt,
1774 			.decrypt = gcm_decrypt,
1775 			.ivsize = GCM_AES_IV_SIZE,
1776 			.maxauthsize = AES_BLOCK_SIZE,
1777 		},
1778 		.caam = {
1779 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
1780 		},
1781 	},
1782 	/* single-pass ipsec_esp descriptor */
1783 	{
1784 		.aead = {
1785 			.base = {
1786 				.cra_name = "authenc(hmac(md5),"
1787 					    "ecb(cipher_null))",
1788 				.cra_driver_name = "authenc-hmac-md5-"
1789 						   "ecb-cipher_null-caam",
1790 				.cra_blocksize = NULL_BLOCK_SIZE,
1791 			},
1792 			.setkey = aead_setkey,
1793 			.setauthsize = aead_setauthsize,
1794 			.encrypt = aead_encrypt,
1795 			.decrypt = aead_decrypt,
1796 			.ivsize = NULL_IV_SIZE,
1797 			.maxauthsize = MD5_DIGEST_SIZE,
1798 		},
1799 		.caam = {
1800 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
1801 					   OP_ALG_AAI_HMAC_PRECOMP,
1802 		},
1803 	},
1804 	{
1805 		.aead = {
1806 			.base = {
1807 				.cra_name = "authenc(hmac(sha1),"
1808 					    "ecb(cipher_null))",
1809 				.cra_driver_name = "authenc-hmac-sha1-"
1810 						   "ecb-cipher_null-caam",
1811 				.cra_blocksize = NULL_BLOCK_SIZE,
1812 			},
1813 			.setkey = aead_setkey,
1814 			.setauthsize = aead_setauthsize,
1815 			.encrypt = aead_encrypt,
1816 			.decrypt = aead_decrypt,
1817 			.ivsize = NULL_IV_SIZE,
1818 			.maxauthsize = SHA1_DIGEST_SIZE,
1819 		},
1820 		.caam = {
1821 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1822 					   OP_ALG_AAI_HMAC_PRECOMP,
1823 		},
1824 	},
1825 	{
1826 		.aead = {
1827 			.base = {
1828 				.cra_name = "authenc(hmac(sha224),"
1829 					    "ecb(cipher_null))",
1830 				.cra_driver_name = "authenc-hmac-sha224-"
1831 						   "ecb-cipher_null-caam",
1832 				.cra_blocksize = NULL_BLOCK_SIZE,
1833 			},
1834 			.setkey = aead_setkey,
1835 			.setauthsize = aead_setauthsize,
1836 			.encrypt = aead_encrypt,
1837 			.decrypt = aead_decrypt,
1838 			.ivsize = NULL_IV_SIZE,
1839 			.maxauthsize = SHA224_DIGEST_SIZE,
1840 		},
1841 		.caam = {
1842 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
1843 					   OP_ALG_AAI_HMAC_PRECOMP,
1844 		},
1845 	},
1846 	{
1847 		.aead = {
1848 			.base = {
1849 				.cra_name = "authenc(hmac(sha256),"
1850 					    "ecb(cipher_null))",
1851 				.cra_driver_name = "authenc-hmac-sha256-"
1852 						   "ecb-cipher_null-caam",
1853 				.cra_blocksize = NULL_BLOCK_SIZE,
1854 			},
1855 			.setkey = aead_setkey,
1856 			.setauthsize = aead_setauthsize,
1857 			.encrypt = aead_encrypt,
1858 			.decrypt = aead_decrypt,
1859 			.ivsize = NULL_IV_SIZE,
1860 			.maxauthsize = SHA256_DIGEST_SIZE,
1861 		},
1862 		.caam = {
1863 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
1864 					   OP_ALG_AAI_HMAC_PRECOMP,
1865 		},
1866 	},
1867 	{
1868 		.aead = {
1869 			.base = {
1870 				.cra_name = "authenc(hmac(sha384),"
1871 					    "ecb(cipher_null))",
1872 				.cra_driver_name = "authenc-hmac-sha384-"
1873 						   "ecb-cipher_null-caam",
1874 				.cra_blocksize = NULL_BLOCK_SIZE,
1875 			},
1876 			.setkey = aead_setkey,
1877 			.setauthsize = aead_setauthsize,
1878 			.encrypt = aead_encrypt,
1879 			.decrypt = aead_decrypt,
1880 			.ivsize = NULL_IV_SIZE,
1881 			.maxauthsize = SHA384_DIGEST_SIZE,
1882 		},
1883 		.caam = {
1884 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
1885 					   OP_ALG_AAI_HMAC_PRECOMP,
1886 		},
1887 	},
1888 	{
1889 		.aead = {
1890 			.base = {
1891 				.cra_name = "authenc(hmac(sha512),"
1892 					    "ecb(cipher_null))",
1893 				.cra_driver_name = "authenc-hmac-sha512-"
1894 						   "ecb-cipher_null-caam",
1895 				.cra_blocksize = NULL_BLOCK_SIZE,
1896 			},
1897 			.setkey = aead_setkey,
1898 			.setauthsize = aead_setauthsize,
1899 			.encrypt = aead_encrypt,
1900 			.decrypt = aead_decrypt,
1901 			.ivsize = NULL_IV_SIZE,
1902 			.maxauthsize = SHA512_DIGEST_SIZE,
1903 		},
1904 		.caam = {
1905 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
1906 					   OP_ALG_AAI_HMAC_PRECOMP,
1907 		},
1908 	},
1909 	{
1910 		.aead = {
1911 			.base = {
1912 				.cra_name = "authenc(hmac(md5),cbc(aes))",
1913 				.cra_driver_name = "authenc-hmac-md5-"
1914 						   "cbc-aes-caam",
1915 				.cra_blocksize = AES_BLOCK_SIZE,
1916 			},
1917 			.setkey = aead_setkey,
1918 			.setauthsize = aead_setauthsize,
1919 			.encrypt = aead_encrypt,
1920 			.decrypt = aead_decrypt,
1921 			.ivsize = AES_BLOCK_SIZE,
1922 			.maxauthsize = MD5_DIGEST_SIZE,
1923 		},
1924 		.caam = {
1925 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1926 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
1927 					   OP_ALG_AAI_HMAC_PRECOMP,
1928 		},
1929 	},
1930 	{
1931 		.aead = {
1932 			.base = {
1933 				.cra_name = "echainiv(authenc(hmac(md5),"
1934 					    "cbc(aes)))",
1935 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
1936 						   "cbc-aes-caam",
1937 				.cra_blocksize = AES_BLOCK_SIZE,
1938 			},
1939 			.setkey = aead_setkey,
1940 			.setauthsize = aead_setauthsize,
1941 			.encrypt = aead_encrypt,
1942 			.decrypt = aead_decrypt,
1943 			.ivsize = AES_BLOCK_SIZE,
1944 			.maxauthsize = MD5_DIGEST_SIZE,
1945 		},
1946 		.caam = {
1947 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1948 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
1949 					   OP_ALG_AAI_HMAC_PRECOMP,
1950 			.geniv = true,
1951 		},
1952 	},
1953 	{
1954 		.aead = {
1955 			.base = {
1956 				.cra_name = "authenc(hmac(sha1),cbc(aes))",
1957 				.cra_driver_name = "authenc-hmac-sha1-"
1958 						   "cbc-aes-caam",
1959 				.cra_blocksize = AES_BLOCK_SIZE,
1960 			},
1961 			.setkey = aead_setkey,
1962 			.setauthsize = aead_setauthsize,
1963 			.encrypt = aead_encrypt,
1964 			.decrypt = aead_decrypt,
1965 			.ivsize = AES_BLOCK_SIZE,
1966 			.maxauthsize = SHA1_DIGEST_SIZE,
1967 		},
1968 		.caam = {
1969 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1970 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1971 					   OP_ALG_AAI_HMAC_PRECOMP,
1972 		},
1973 	},
1974 	{
1975 		.aead = {
1976 			.base = {
1977 				.cra_name = "echainiv(authenc(hmac(sha1),"
1978 					    "cbc(aes)))",
1979 				.cra_driver_name = "echainiv-authenc-"
1980 						   "hmac-sha1-cbc-aes-caam",
1981 				.cra_blocksize = AES_BLOCK_SIZE,
1982 			},
1983 			.setkey = aead_setkey,
1984 			.setauthsize = aead_setauthsize,
1985 			.encrypt = aead_encrypt,
1986 			.decrypt = aead_decrypt,
1987 			.ivsize = AES_BLOCK_SIZE,
1988 			.maxauthsize = SHA1_DIGEST_SIZE,
1989 		},
1990 		.caam = {
1991 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1992 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
1993 					   OP_ALG_AAI_HMAC_PRECOMP,
1994 			.geniv = true,
1995 		},
1996 	},
1997 	{
1998 		.aead = {
1999 			.base = {
2000 				.cra_name = "authenc(hmac(sha224),cbc(aes))",
2001 				.cra_driver_name = "authenc-hmac-sha224-"
2002 						   "cbc-aes-caam",
2003 				.cra_blocksize = AES_BLOCK_SIZE,
2004 			},
2005 			.setkey = aead_setkey,
2006 			.setauthsize = aead_setauthsize,
2007 			.encrypt = aead_encrypt,
2008 			.decrypt = aead_decrypt,
2009 			.ivsize = AES_BLOCK_SIZE,
2010 			.maxauthsize = SHA224_DIGEST_SIZE,
2011 		},
2012 		.caam = {
2013 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2014 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2015 					   OP_ALG_AAI_HMAC_PRECOMP,
2016 		},
2017 	},
2018 	{
2019 		.aead = {
2020 			.base = {
2021 				.cra_name = "echainiv(authenc(hmac(sha224),"
2022 					    "cbc(aes)))",
2023 				.cra_driver_name = "echainiv-authenc-"
2024 						   "hmac-sha224-cbc-aes-caam",
2025 				.cra_blocksize = AES_BLOCK_SIZE,
2026 			},
2027 			.setkey = aead_setkey,
2028 			.setauthsize = aead_setauthsize,
2029 			.encrypt = aead_encrypt,
2030 			.decrypt = aead_decrypt,
2031 			.ivsize = AES_BLOCK_SIZE,
2032 			.maxauthsize = SHA224_DIGEST_SIZE,
2033 		},
2034 		.caam = {
2035 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2036 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2037 					   OP_ALG_AAI_HMAC_PRECOMP,
2038 			.geniv = true,
2039 		},
2040 	},
2041 	{
2042 		.aead = {
2043 			.base = {
2044 				.cra_name = "authenc(hmac(sha256),cbc(aes))",
2045 				.cra_driver_name = "authenc-hmac-sha256-"
2046 						   "cbc-aes-caam",
2047 				.cra_blocksize = AES_BLOCK_SIZE,
2048 			},
2049 			.setkey = aead_setkey,
2050 			.setauthsize = aead_setauthsize,
2051 			.encrypt = aead_encrypt,
2052 			.decrypt = aead_decrypt,
2053 			.ivsize = AES_BLOCK_SIZE,
2054 			.maxauthsize = SHA256_DIGEST_SIZE,
2055 		},
2056 		.caam = {
2057 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2058 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2059 					   OP_ALG_AAI_HMAC_PRECOMP,
2060 		},
2061 	},
2062 	{
2063 		.aead = {
2064 			.base = {
2065 				.cra_name = "echainiv(authenc(hmac(sha256),"
2066 					    "cbc(aes)))",
2067 				.cra_driver_name = "echainiv-authenc-"
2068 						   "hmac-sha256-cbc-aes-caam",
2069 				.cra_blocksize = AES_BLOCK_SIZE,
2070 			},
2071 			.setkey = aead_setkey,
2072 			.setauthsize = aead_setauthsize,
2073 			.encrypt = aead_encrypt,
2074 			.decrypt = aead_decrypt,
2075 			.ivsize = AES_BLOCK_SIZE,
2076 			.maxauthsize = SHA256_DIGEST_SIZE,
2077 		},
2078 		.caam = {
2079 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2080 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2081 					   OP_ALG_AAI_HMAC_PRECOMP,
2082 			.geniv = true,
2083 		},
2084 	},
2085 	{
2086 		.aead = {
2087 			.base = {
2088 				.cra_name = "authenc(hmac(sha384),cbc(aes))",
2089 				.cra_driver_name = "authenc-hmac-sha384-"
2090 						   "cbc-aes-caam",
2091 				.cra_blocksize = AES_BLOCK_SIZE,
2092 			},
2093 			.setkey = aead_setkey,
2094 			.setauthsize = aead_setauthsize,
2095 			.encrypt = aead_encrypt,
2096 			.decrypt = aead_decrypt,
2097 			.ivsize = AES_BLOCK_SIZE,
2098 			.maxauthsize = SHA384_DIGEST_SIZE,
2099 		},
2100 		.caam = {
2101 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2102 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2103 					   OP_ALG_AAI_HMAC_PRECOMP,
2104 		},
2105 	},
2106 	{
2107 		.aead = {
2108 			.base = {
2109 				.cra_name = "echainiv(authenc(hmac(sha384),"
2110 					    "cbc(aes)))",
2111 				.cra_driver_name = "echainiv-authenc-"
2112 						   "hmac-sha384-cbc-aes-caam",
2113 				.cra_blocksize = AES_BLOCK_SIZE,
2114 			},
2115 			.setkey = aead_setkey,
2116 			.setauthsize = aead_setauthsize,
2117 			.encrypt = aead_encrypt,
2118 			.decrypt = aead_decrypt,
2119 			.ivsize = AES_BLOCK_SIZE,
2120 			.maxauthsize = SHA384_DIGEST_SIZE,
2121 		},
2122 		.caam = {
2123 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2124 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2125 					   OP_ALG_AAI_HMAC_PRECOMP,
2126 			.geniv = true,
2127 		},
2128 	},
2129 	{
2130 		.aead = {
2131 			.base = {
2132 				.cra_name = "authenc(hmac(sha512),cbc(aes))",
2133 				.cra_driver_name = "authenc-hmac-sha512-"
2134 						   "cbc-aes-caam",
2135 				.cra_blocksize = AES_BLOCK_SIZE,
2136 			},
2137 			.setkey = aead_setkey,
2138 			.setauthsize = aead_setauthsize,
2139 			.encrypt = aead_encrypt,
2140 			.decrypt = aead_decrypt,
2141 			.ivsize = AES_BLOCK_SIZE,
2142 			.maxauthsize = SHA512_DIGEST_SIZE,
2143 		},
2144 		.caam = {
2145 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2146 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2147 					   OP_ALG_AAI_HMAC_PRECOMP,
2148 		},
2149 	},
2150 	{
2151 		.aead = {
2152 			.base = {
2153 				.cra_name = "echainiv(authenc(hmac(sha512),"
2154 					    "cbc(aes)))",
2155 				.cra_driver_name = "echainiv-authenc-"
2156 						   "hmac-sha512-cbc-aes-caam",
2157 				.cra_blocksize = AES_BLOCK_SIZE,
2158 			},
2159 			.setkey = aead_setkey,
2160 			.setauthsize = aead_setauthsize,
2161 			.encrypt = aead_encrypt,
2162 			.decrypt = aead_decrypt,
2163 			.ivsize = AES_BLOCK_SIZE,
2164 			.maxauthsize = SHA512_DIGEST_SIZE,
2165 		},
2166 		.caam = {
2167 			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2168 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2169 					   OP_ALG_AAI_HMAC_PRECOMP,
2170 			.geniv = true,
2171 		},
2172 	},
2173 	{
2174 		.aead = {
2175 			.base = {
2176 				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2177 				.cra_driver_name = "authenc-hmac-md5-"
2178 						   "cbc-des3_ede-caam",
2179 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2180 			},
2181 			.setkey = aead_setkey,
2182 			.setauthsize = aead_setauthsize,
2183 			.encrypt = aead_encrypt,
2184 			.decrypt = aead_decrypt,
2185 			.ivsize = DES3_EDE_BLOCK_SIZE,
2186 			.maxauthsize = MD5_DIGEST_SIZE,
2187 		},
2188 		.caam = {
2189 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2190 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2191 					   OP_ALG_AAI_HMAC_PRECOMP,
2192 		}
2193 	},
2194 	{
2195 		.aead = {
2196 			.base = {
2197 				.cra_name = "echainiv(authenc(hmac(md5),"
2198 					    "cbc(des3_ede)))",
2199 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2200 						   "cbc-des3_ede-caam",
2201 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2202 			},
2203 			.setkey = aead_setkey,
2204 			.setauthsize = aead_setauthsize,
2205 			.encrypt = aead_encrypt,
2206 			.decrypt = aead_decrypt,
2207 			.ivsize = DES3_EDE_BLOCK_SIZE,
2208 			.maxauthsize = MD5_DIGEST_SIZE,
2209 		},
2210 		.caam = {
2211 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2212 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2213 					   OP_ALG_AAI_HMAC_PRECOMP,
2214 			.geniv = true,
2215 		}
2216 	},
2217 	{
2218 		.aead = {
2219 			.base = {
2220 				.cra_name = "authenc(hmac(sha1),"
2221 					    "cbc(des3_ede))",
2222 				.cra_driver_name = "authenc-hmac-sha1-"
2223 						   "cbc-des3_ede-caam",
2224 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2225 			},
2226 			.setkey = aead_setkey,
2227 			.setauthsize = aead_setauthsize,
2228 			.encrypt = aead_encrypt,
2229 			.decrypt = aead_decrypt,
2230 			.ivsize = DES3_EDE_BLOCK_SIZE,
2231 			.maxauthsize = SHA1_DIGEST_SIZE,
2232 		},
2233 		.caam = {
2234 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2235 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2236 					   OP_ALG_AAI_HMAC_PRECOMP,
2237 		},
2238 	},
2239 	{
2240 		.aead = {
2241 			.base = {
2242 				.cra_name = "echainiv(authenc(hmac(sha1),"
2243 					    "cbc(des3_ede)))",
2244 				.cra_driver_name = "echainiv-authenc-"
2245 						   "hmac-sha1-"
2246 						   "cbc-des3_ede-caam",
2247 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2248 			},
2249 			.setkey = aead_setkey,
2250 			.setauthsize = aead_setauthsize,
2251 			.encrypt = aead_encrypt,
2252 			.decrypt = aead_decrypt,
2253 			.ivsize = DES3_EDE_BLOCK_SIZE,
2254 			.maxauthsize = SHA1_DIGEST_SIZE,
2255 		},
2256 		.caam = {
2257 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2258 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2259 					   OP_ALG_AAI_HMAC_PRECOMP,
2260 			.geniv = true,
2261 		},
2262 	},
2263 	{
2264 		.aead = {
2265 			.base = {
2266 				.cra_name = "authenc(hmac(sha224),"
2267 					    "cbc(des3_ede))",
2268 				.cra_driver_name = "authenc-hmac-sha224-"
2269 						   "cbc-des3_ede-caam",
2270 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2271 			},
2272 			.setkey = aead_setkey,
2273 			.setauthsize = aead_setauthsize,
2274 			.encrypt = aead_encrypt,
2275 			.decrypt = aead_decrypt,
2276 			.ivsize = DES3_EDE_BLOCK_SIZE,
2277 			.maxauthsize = SHA224_DIGEST_SIZE,
2278 		},
2279 		.caam = {
2280 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2281 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2282 					   OP_ALG_AAI_HMAC_PRECOMP,
2283 		},
2284 	},
2285 	{
2286 		.aead = {
2287 			.base = {
2288 				.cra_name = "echainiv(authenc(hmac(sha224),"
2289 					    "cbc(des3_ede)))",
2290 				.cra_driver_name = "echainiv-authenc-"
2291 						   "hmac-sha224-"
2292 						   "cbc-des3_ede-caam",
2293 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2294 			},
2295 			.setkey = aead_setkey,
2296 			.setauthsize = aead_setauthsize,
2297 			.encrypt = aead_encrypt,
2298 			.decrypt = aead_decrypt,
2299 			.ivsize = DES3_EDE_BLOCK_SIZE,
2300 			.maxauthsize = SHA224_DIGEST_SIZE,
2301 		},
2302 		.caam = {
2303 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2304 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2305 					   OP_ALG_AAI_HMAC_PRECOMP,
2306 			.geniv = true,
2307 		},
2308 	},
2309 	{
2310 		.aead = {
2311 			.base = {
2312 				.cra_name = "authenc(hmac(sha256),"
2313 					    "cbc(des3_ede))",
2314 				.cra_driver_name = "authenc-hmac-sha256-"
2315 						   "cbc-des3_ede-caam",
2316 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2317 			},
2318 			.setkey = aead_setkey,
2319 			.setauthsize = aead_setauthsize,
2320 			.encrypt = aead_encrypt,
2321 			.decrypt = aead_decrypt,
2322 			.ivsize = DES3_EDE_BLOCK_SIZE,
2323 			.maxauthsize = SHA256_DIGEST_SIZE,
2324 		},
2325 		.caam = {
2326 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2327 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2328 					   OP_ALG_AAI_HMAC_PRECOMP,
2329 		},
2330 	},
2331 	{
2332 		.aead = {
2333 			.base = {
2334 				.cra_name = "echainiv(authenc(hmac(sha256),"
2335 					    "cbc(des3_ede)))",
2336 				.cra_driver_name = "echainiv-authenc-"
2337 						   "hmac-sha256-"
2338 						   "cbc-des3_ede-caam",
2339 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2340 			},
2341 			.setkey = aead_setkey,
2342 			.setauthsize = aead_setauthsize,
2343 			.encrypt = aead_encrypt,
2344 			.decrypt = aead_decrypt,
2345 			.ivsize = DES3_EDE_BLOCK_SIZE,
2346 			.maxauthsize = SHA256_DIGEST_SIZE,
2347 		},
2348 		.caam = {
2349 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2350 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2351 					   OP_ALG_AAI_HMAC_PRECOMP,
2352 			.geniv = true,
2353 		},
2354 	},
2355 	{
2356 		.aead = {
2357 			.base = {
2358 				.cra_name = "authenc(hmac(sha384),"
2359 					    "cbc(des3_ede))",
2360 				.cra_driver_name = "authenc-hmac-sha384-"
2361 						   "cbc-des3_ede-caam",
2362 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2363 			},
2364 			.setkey = aead_setkey,
2365 			.setauthsize = aead_setauthsize,
2366 			.encrypt = aead_encrypt,
2367 			.decrypt = aead_decrypt,
2368 			.ivsize = DES3_EDE_BLOCK_SIZE,
2369 			.maxauthsize = SHA384_DIGEST_SIZE,
2370 		},
2371 		.caam = {
2372 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2373 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2374 					   OP_ALG_AAI_HMAC_PRECOMP,
2375 		},
2376 	},
2377 	{
2378 		.aead = {
2379 			.base = {
2380 				.cra_name = "echainiv(authenc(hmac(sha384),"
2381 					    "cbc(des3_ede)))",
2382 				.cra_driver_name = "echainiv-authenc-"
2383 						   "hmac-sha384-"
2384 						   "cbc-des3_ede-caam",
2385 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2386 			},
2387 			.setkey = aead_setkey,
2388 			.setauthsize = aead_setauthsize,
2389 			.encrypt = aead_encrypt,
2390 			.decrypt = aead_decrypt,
2391 			.ivsize = DES3_EDE_BLOCK_SIZE,
2392 			.maxauthsize = SHA384_DIGEST_SIZE,
2393 		},
2394 		.caam = {
2395 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2396 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2397 					   OP_ALG_AAI_HMAC_PRECOMP,
2398 			.geniv = true,
2399 		},
2400 	},
2401 	{
2402 		.aead = {
2403 			.base = {
2404 				.cra_name = "authenc(hmac(sha512),"
2405 					    "cbc(des3_ede))",
2406 				.cra_driver_name = "authenc-hmac-sha512-"
2407 						   "cbc-des3_ede-caam",
2408 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2409 			},
2410 			.setkey = aead_setkey,
2411 			.setauthsize = aead_setauthsize,
2412 			.encrypt = aead_encrypt,
2413 			.decrypt = aead_decrypt,
2414 			.ivsize = DES3_EDE_BLOCK_SIZE,
2415 			.maxauthsize = SHA512_DIGEST_SIZE,
2416 		},
2417 		.caam = {
2418 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2419 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2420 					   OP_ALG_AAI_HMAC_PRECOMP,
2421 		},
2422 	},
2423 	{
2424 		.aead = {
2425 			.base = {
2426 				.cra_name = "echainiv(authenc(hmac(sha512),"
2427 					    "cbc(des3_ede)))",
2428 				.cra_driver_name = "echainiv-authenc-"
2429 						   "hmac-sha512-"
2430 						   "cbc-des3_ede-caam",
2431 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2432 			},
2433 			.setkey = aead_setkey,
2434 			.setauthsize = aead_setauthsize,
2435 			.encrypt = aead_encrypt,
2436 			.decrypt = aead_decrypt,
2437 			.ivsize = DES3_EDE_BLOCK_SIZE,
2438 			.maxauthsize = SHA512_DIGEST_SIZE,
2439 		},
2440 		.caam = {
2441 			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2442 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2443 					   OP_ALG_AAI_HMAC_PRECOMP,
2444 			.geniv = true,
2445 		},
2446 	},
2447 	{
2448 		.aead = {
2449 			.base = {
2450 				.cra_name = "authenc(hmac(md5),cbc(des))",
2451 				.cra_driver_name = "authenc-hmac-md5-"
2452 						   "cbc-des-caam",
2453 				.cra_blocksize = DES_BLOCK_SIZE,
2454 			},
2455 			.setkey = aead_setkey,
2456 			.setauthsize = aead_setauthsize,
2457 			.encrypt = aead_encrypt,
2458 			.decrypt = aead_decrypt,
2459 			.ivsize = DES_BLOCK_SIZE,
2460 			.maxauthsize = MD5_DIGEST_SIZE,
2461 		},
2462 		.caam = {
2463 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2464 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2465 					   OP_ALG_AAI_HMAC_PRECOMP,
2466 		},
2467 	},
2468 	{
2469 		.aead = {
2470 			.base = {
2471 				.cra_name = "echainiv(authenc(hmac(md5),"
2472 					    "cbc(des)))",
2473 				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2474 						   "cbc-des-caam",
2475 				.cra_blocksize = DES_BLOCK_SIZE,
2476 			},
2477 			.setkey = aead_setkey,
2478 			.setauthsize = aead_setauthsize,
2479 			.encrypt = aead_encrypt,
2480 			.decrypt = aead_decrypt,
2481 			.ivsize = DES_BLOCK_SIZE,
2482 			.maxauthsize = MD5_DIGEST_SIZE,
2483 		},
2484 		.caam = {
2485 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2486 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2487 					   OP_ALG_AAI_HMAC_PRECOMP,
2488 			.geniv = true,
2489 		},
2490 	},
2491 	{
2492 		.aead = {
2493 			.base = {
2494 				.cra_name = "authenc(hmac(sha1),cbc(des))",
2495 				.cra_driver_name = "authenc-hmac-sha1-"
2496 						   "cbc-des-caam",
2497 				.cra_blocksize = DES_BLOCK_SIZE,
2498 			},
2499 			.setkey = aead_setkey,
2500 			.setauthsize = aead_setauthsize,
2501 			.encrypt = aead_encrypt,
2502 			.decrypt = aead_decrypt,
2503 			.ivsize = DES_BLOCK_SIZE,
2504 			.maxauthsize = SHA1_DIGEST_SIZE,
2505 		},
2506 		.caam = {
2507 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2508 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2509 					   OP_ALG_AAI_HMAC_PRECOMP,
2510 		},
2511 	},
2512 	{
2513 		.aead = {
2514 			.base = {
2515 				.cra_name = "echainiv(authenc(hmac(sha1),"
2516 					    "cbc(des)))",
2517 				.cra_driver_name = "echainiv-authenc-"
2518 						   "hmac-sha1-cbc-des-caam",
2519 				.cra_blocksize = DES_BLOCK_SIZE,
2520 			},
2521 			.setkey = aead_setkey,
2522 			.setauthsize = aead_setauthsize,
2523 			.encrypt = aead_encrypt,
2524 			.decrypt = aead_decrypt,
2525 			.ivsize = DES_BLOCK_SIZE,
2526 			.maxauthsize = SHA1_DIGEST_SIZE,
2527 		},
2528 		.caam = {
2529 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2530 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2531 					   OP_ALG_AAI_HMAC_PRECOMP,
2532 			.geniv = true,
2533 		},
2534 	},
2535 	{
2536 		.aead = {
2537 			.base = {
2538 				.cra_name = "authenc(hmac(sha224),cbc(des))",
2539 				.cra_driver_name = "authenc-hmac-sha224-"
2540 						   "cbc-des-caam",
2541 				.cra_blocksize = DES_BLOCK_SIZE,
2542 			},
2543 			.setkey = aead_setkey,
2544 			.setauthsize = aead_setauthsize,
2545 			.encrypt = aead_encrypt,
2546 			.decrypt = aead_decrypt,
2547 			.ivsize = DES_BLOCK_SIZE,
2548 			.maxauthsize = SHA224_DIGEST_SIZE,
2549 		},
2550 		.caam = {
2551 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2552 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2553 					   OP_ALG_AAI_HMAC_PRECOMP,
2554 		},
2555 	},
2556 	{
2557 		.aead = {
2558 			.base = {
2559 				.cra_name = "echainiv(authenc(hmac(sha224),"
2560 					    "cbc(des)))",
2561 				.cra_driver_name = "echainiv-authenc-"
2562 						   "hmac-sha224-cbc-des-caam",
2563 				.cra_blocksize = DES_BLOCK_SIZE,
2564 			},
2565 			.setkey = aead_setkey,
2566 			.setauthsize = aead_setauthsize,
2567 			.encrypt = aead_encrypt,
2568 			.decrypt = aead_decrypt,
2569 			.ivsize = DES_BLOCK_SIZE,
2570 			.maxauthsize = SHA224_DIGEST_SIZE,
2571 		},
2572 		.caam = {
2573 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2574 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2575 					   OP_ALG_AAI_HMAC_PRECOMP,
2576 			.geniv = true,
2577 		},
2578 	},
2579 	{
2580 		.aead = {
2581 			.base = {
2582 				.cra_name = "authenc(hmac(sha256),cbc(des))",
2583 				.cra_driver_name = "authenc-hmac-sha256-"
2584 						   "cbc-des-caam",
2585 				.cra_blocksize = DES_BLOCK_SIZE,
2586 			},
2587 			.setkey = aead_setkey,
2588 			.setauthsize = aead_setauthsize,
2589 			.encrypt = aead_encrypt,
2590 			.decrypt = aead_decrypt,
2591 			.ivsize = DES_BLOCK_SIZE,
2592 			.maxauthsize = SHA256_DIGEST_SIZE,
2593 		},
2594 		.caam = {
2595 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2596 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2597 					   OP_ALG_AAI_HMAC_PRECOMP,
2598 		},
2599 	},
2600 	{
2601 		.aead = {
2602 			.base = {
2603 				.cra_name = "echainiv(authenc(hmac(sha256),"
2604 					    "cbc(des)))",
2605 				.cra_driver_name = "echainiv-authenc-"
2606 						   "hmac-sha256-cbc-des-caam",
2607 				.cra_blocksize = DES_BLOCK_SIZE,
2608 			},
2609 			.setkey = aead_setkey,
2610 			.setauthsize = aead_setauthsize,
2611 			.encrypt = aead_encrypt,
2612 			.decrypt = aead_decrypt,
2613 			.ivsize = DES_BLOCK_SIZE,
2614 			.maxauthsize = SHA256_DIGEST_SIZE,
2615 		},
2616 		.caam = {
2617 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2618 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2619 					   OP_ALG_AAI_HMAC_PRECOMP,
2620 			.geniv = true,
2621 		},
2622 	},
2623 	{
2624 		.aead = {
2625 			.base = {
2626 				.cra_name = "authenc(hmac(sha384),cbc(des))",
2627 				.cra_driver_name = "authenc-hmac-sha384-"
2628 						   "cbc-des-caam",
2629 				.cra_blocksize = DES_BLOCK_SIZE,
2630 			},
2631 			.setkey = aead_setkey,
2632 			.setauthsize = aead_setauthsize,
2633 			.encrypt = aead_encrypt,
2634 			.decrypt = aead_decrypt,
2635 			.ivsize = DES_BLOCK_SIZE,
2636 			.maxauthsize = SHA384_DIGEST_SIZE,
2637 		},
2638 		.caam = {
2639 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2640 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2641 					   OP_ALG_AAI_HMAC_PRECOMP,
2642 		},
2643 	},
2644 	{
2645 		.aead = {
2646 			.base = {
2647 				.cra_name = "echainiv(authenc(hmac(sha384),"
2648 					    "cbc(des)))",
2649 				.cra_driver_name = "echainiv-authenc-"
2650 						   "hmac-sha384-cbc-des-caam",
2651 				.cra_blocksize = DES_BLOCK_SIZE,
2652 			},
2653 			.setkey = aead_setkey,
2654 			.setauthsize = aead_setauthsize,
2655 			.encrypt = aead_encrypt,
2656 			.decrypt = aead_decrypt,
2657 			.ivsize = DES_BLOCK_SIZE,
2658 			.maxauthsize = SHA384_DIGEST_SIZE,
2659 		},
2660 		.caam = {
2661 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2662 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2663 					   OP_ALG_AAI_HMAC_PRECOMP,
2664 			.geniv = true,
2665 		},
2666 	},
2667 	{
2668 		.aead = {
2669 			.base = {
2670 				.cra_name = "authenc(hmac(sha512),cbc(des))",
2671 				.cra_driver_name = "authenc-hmac-sha512-"
2672 						   "cbc-des-caam",
2673 				.cra_blocksize = DES_BLOCK_SIZE,
2674 			},
2675 			.setkey = aead_setkey,
2676 			.setauthsize = aead_setauthsize,
2677 			.encrypt = aead_encrypt,
2678 			.decrypt = aead_decrypt,
2679 			.ivsize = DES_BLOCK_SIZE,
2680 			.maxauthsize = SHA512_DIGEST_SIZE,
2681 		},
2682 		.caam = {
2683 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2684 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2685 					   OP_ALG_AAI_HMAC_PRECOMP,
2686 		},
2687 	},
2688 	{
2689 		.aead = {
2690 			.base = {
2691 				.cra_name = "echainiv(authenc(hmac(sha512),"
2692 					    "cbc(des)))",
2693 				.cra_driver_name = "echainiv-authenc-"
2694 						   "hmac-sha512-cbc-des-caam",
2695 				.cra_blocksize = DES_BLOCK_SIZE,
2696 			},
2697 			.setkey = aead_setkey,
2698 			.setauthsize = aead_setauthsize,
2699 			.encrypt = aead_encrypt,
2700 			.decrypt = aead_decrypt,
2701 			.ivsize = DES_BLOCK_SIZE,
2702 			.maxauthsize = SHA512_DIGEST_SIZE,
2703 		},
2704 		.caam = {
2705 			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2706 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2707 					   OP_ALG_AAI_HMAC_PRECOMP,
2708 			.geniv = true,
2709 		},
2710 	},
2711 	{
2712 		.aead = {
2713 			.base = {
2714 				.cra_name = "authenc(hmac(md5),"
2715 					    "rfc3686(ctr(aes)))",
2716 				.cra_driver_name = "authenc-hmac-md5-"
2717 						   "rfc3686-ctr-aes-caam",
2718 				.cra_blocksize = 1,
2719 			},
2720 			.setkey = aead_setkey,
2721 			.setauthsize = aead_setauthsize,
2722 			.encrypt = aead_encrypt,
2723 			.decrypt = aead_decrypt,
2724 			.ivsize = CTR_RFC3686_IV_SIZE,
2725 			.maxauthsize = MD5_DIGEST_SIZE,
2726 		},
2727 		.caam = {
2728 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2729 					   OP_ALG_AAI_CTR_MOD128,
2730 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2731 					   OP_ALG_AAI_HMAC_PRECOMP,
2732 			.rfc3686 = true,
2733 		},
2734 	},
2735 	{
2736 		.aead = {
2737 			.base = {
2738 				.cra_name = "seqiv(authenc("
2739 					    "hmac(md5),rfc3686(ctr(aes))))",
2740 				.cra_driver_name = "seqiv-authenc-hmac-md5-"
2741 						   "rfc3686-ctr-aes-caam",
2742 				.cra_blocksize = 1,
2743 			},
2744 			.setkey = aead_setkey,
2745 			.setauthsize = aead_setauthsize,
2746 			.encrypt = aead_encrypt,
2747 			.decrypt = aead_decrypt,
2748 			.ivsize = CTR_RFC3686_IV_SIZE,
2749 			.maxauthsize = MD5_DIGEST_SIZE,
2750 		},
2751 		.caam = {
2752 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2753 					   OP_ALG_AAI_CTR_MOD128,
2754 			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2755 					   OP_ALG_AAI_HMAC_PRECOMP,
2756 			.rfc3686 = true,
2757 			.geniv = true,
2758 		},
2759 	},
2760 	{
2761 		.aead = {
2762 			.base = {
2763 				.cra_name = "authenc(hmac(sha1),"
2764 					    "rfc3686(ctr(aes)))",
2765 				.cra_driver_name = "authenc-hmac-sha1-"
2766 						   "rfc3686-ctr-aes-caam",
2767 				.cra_blocksize = 1,
2768 			},
2769 			.setkey = aead_setkey,
2770 			.setauthsize = aead_setauthsize,
2771 			.encrypt = aead_encrypt,
2772 			.decrypt = aead_decrypt,
2773 			.ivsize = CTR_RFC3686_IV_SIZE,
2774 			.maxauthsize = SHA1_DIGEST_SIZE,
2775 		},
2776 		.caam = {
2777 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2778 					   OP_ALG_AAI_CTR_MOD128,
2779 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2780 					   OP_ALG_AAI_HMAC_PRECOMP,
2781 			.rfc3686 = true,
2782 		},
2783 	},
2784 	{
2785 		.aead = {
2786 			.base = {
2787 				.cra_name = "seqiv(authenc("
2788 					    "hmac(sha1),rfc3686(ctr(aes))))",
2789 				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
2790 						   "rfc3686-ctr-aes-caam",
2791 				.cra_blocksize = 1,
2792 			},
2793 			.setkey = aead_setkey,
2794 			.setauthsize = aead_setauthsize,
2795 			.encrypt = aead_encrypt,
2796 			.decrypt = aead_decrypt,
2797 			.ivsize = CTR_RFC3686_IV_SIZE,
2798 			.maxauthsize = SHA1_DIGEST_SIZE,
2799 		},
2800 		.caam = {
2801 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2802 					   OP_ALG_AAI_CTR_MOD128,
2803 			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2804 					   OP_ALG_AAI_HMAC_PRECOMP,
2805 			.rfc3686 = true,
2806 			.geniv = true,
2807 		},
2808 	},
2809 	{
2810 		.aead = {
2811 			.base = {
2812 				.cra_name = "authenc(hmac(sha224),"
2813 					    "rfc3686(ctr(aes)))",
2814 				.cra_driver_name = "authenc-hmac-sha224-"
2815 						   "rfc3686-ctr-aes-caam",
2816 				.cra_blocksize = 1,
2817 			},
2818 			.setkey = aead_setkey,
2819 			.setauthsize = aead_setauthsize,
2820 			.encrypt = aead_encrypt,
2821 			.decrypt = aead_decrypt,
2822 			.ivsize = CTR_RFC3686_IV_SIZE,
2823 			.maxauthsize = SHA224_DIGEST_SIZE,
2824 		},
2825 		.caam = {
2826 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2827 					   OP_ALG_AAI_CTR_MOD128,
2828 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2829 					   OP_ALG_AAI_HMAC_PRECOMP,
2830 			.rfc3686 = true,
2831 		},
2832 	},
2833 	{
2834 		.aead = {
2835 			.base = {
2836 				.cra_name = "seqiv(authenc("
2837 					    "hmac(sha224),rfc3686(ctr(aes))))",
2838 				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
2839 						   "rfc3686-ctr-aes-caam",
2840 				.cra_blocksize = 1,
2841 			},
2842 			.setkey = aead_setkey,
2843 			.setauthsize = aead_setauthsize,
2844 			.encrypt = aead_encrypt,
2845 			.decrypt = aead_decrypt,
2846 			.ivsize = CTR_RFC3686_IV_SIZE,
2847 			.maxauthsize = SHA224_DIGEST_SIZE,
2848 		},
2849 		.caam = {
2850 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2851 					   OP_ALG_AAI_CTR_MOD128,
2852 			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2853 					   OP_ALG_AAI_HMAC_PRECOMP,
2854 			.rfc3686 = true,
2855 			.geniv = true,
2856 		},
2857 	},
2858 	{
2859 		.aead = {
2860 			.base = {
2861 				.cra_name = "authenc(hmac(sha256),"
2862 					    "rfc3686(ctr(aes)))",
2863 				.cra_driver_name = "authenc-hmac-sha256-"
2864 						   "rfc3686-ctr-aes-caam",
2865 				.cra_blocksize = 1,
2866 			},
2867 			.setkey = aead_setkey,
2868 			.setauthsize = aead_setauthsize,
2869 			.encrypt = aead_encrypt,
2870 			.decrypt = aead_decrypt,
2871 			.ivsize = CTR_RFC3686_IV_SIZE,
2872 			.maxauthsize = SHA256_DIGEST_SIZE,
2873 		},
2874 		.caam = {
2875 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2876 					   OP_ALG_AAI_CTR_MOD128,
2877 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2878 					   OP_ALG_AAI_HMAC_PRECOMP,
2879 			.rfc3686 = true,
2880 		},
2881 	},
2882 	{
2883 		.aead = {
2884 			.base = {
2885 				.cra_name = "seqiv(authenc(hmac(sha256),"
2886 					    "rfc3686(ctr(aes))))",
2887 				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
2888 						   "rfc3686-ctr-aes-caam",
2889 				.cra_blocksize = 1,
2890 			},
2891 			.setkey = aead_setkey,
2892 			.setauthsize = aead_setauthsize,
2893 			.encrypt = aead_encrypt,
2894 			.decrypt = aead_decrypt,
2895 			.ivsize = CTR_RFC3686_IV_SIZE,
2896 			.maxauthsize = SHA256_DIGEST_SIZE,
2897 		},
2898 		.caam = {
2899 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2900 					   OP_ALG_AAI_CTR_MOD128,
2901 			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2902 					   OP_ALG_AAI_HMAC_PRECOMP,
2903 			.rfc3686 = true,
2904 			.geniv = true,
2905 		},
2906 	},
2907 	{
2908 		.aead = {
2909 			.base = {
2910 				.cra_name = "authenc(hmac(sha384),"
2911 					    "rfc3686(ctr(aes)))",
2912 				.cra_driver_name = "authenc-hmac-sha384-"
2913 						   "rfc3686-ctr-aes-caam",
2914 				.cra_blocksize = 1,
2915 			},
2916 			.setkey = aead_setkey,
2917 			.setauthsize = aead_setauthsize,
2918 			.encrypt = aead_encrypt,
2919 			.decrypt = aead_decrypt,
2920 			.ivsize = CTR_RFC3686_IV_SIZE,
2921 			.maxauthsize = SHA384_DIGEST_SIZE,
2922 		},
2923 		.caam = {
2924 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2925 					   OP_ALG_AAI_CTR_MOD128,
2926 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2927 					   OP_ALG_AAI_HMAC_PRECOMP,
2928 			.rfc3686 = true,
2929 		},
2930 	},
2931 	{
2932 		.aead = {
2933 			.base = {
2934 				.cra_name = "seqiv(authenc(hmac(sha384),"
2935 					    "rfc3686(ctr(aes))))",
2936 				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
2937 						   "rfc3686-ctr-aes-caam",
2938 				.cra_blocksize = 1,
2939 			},
2940 			.setkey = aead_setkey,
2941 			.setauthsize = aead_setauthsize,
2942 			.encrypt = aead_encrypt,
2943 			.decrypt = aead_decrypt,
2944 			.ivsize = CTR_RFC3686_IV_SIZE,
2945 			.maxauthsize = SHA384_DIGEST_SIZE,
2946 		},
2947 		.caam = {
2948 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2949 					   OP_ALG_AAI_CTR_MOD128,
2950 			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2951 					   OP_ALG_AAI_HMAC_PRECOMP,
2952 			.rfc3686 = true,
2953 			.geniv = true,
2954 		},
2955 	},
2956 	{
2957 		.aead = {
2958 			.base = {
2959 				.cra_name = "authenc(hmac(sha512),"
2960 					    "rfc3686(ctr(aes)))",
2961 				.cra_driver_name = "authenc-hmac-sha512-"
2962 						   "rfc3686-ctr-aes-caam",
2963 				.cra_blocksize = 1,
2964 			},
2965 			.setkey = aead_setkey,
2966 			.setauthsize = aead_setauthsize,
2967 			.encrypt = aead_encrypt,
2968 			.decrypt = aead_decrypt,
2969 			.ivsize = CTR_RFC3686_IV_SIZE,
2970 			.maxauthsize = SHA512_DIGEST_SIZE,
2971 		},
2972 		.caam = {
2973 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2974 					   OP_ALG_AAI_CTR_MOD128,
2975 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2976 					   OP_ALG_AAI_HMAC_PRECOMP,
2977 			.rfc3686 = true,
2978 		},
2979 	},
2980 	{
2981 		.aead = {
2982 			.base = {
2983 				.cra_name = "seqiv(authenc(hmac(sha512),"
2984 					    "rfc3686(ctr(aes))))",
2985 				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
2986 						   "rfc3686-ctr-aes-caam",
2987 				.cra_blocksize = 1,
2988 			},
2989 			.setkey = aead_setkey,
2990 			.setauthsize = aead_setauthsize,
2991 			.encrypt = aead_encrypt,
2992 			.decrypt = aead_decrypt,
2993 			.ivsize = CTR_RFC3686_IV_SIZE,
2994 			.maxauthsize = SHA512_DIGEST_SIZE,
2995 		},
2996 		.caam = {
2997 			.class1_alg_type = OP_ALG_ALGSEL_AES |
2998 					   OP_ALG_AAI_CTR_MOD128,
2999 			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3000 					   OP_ALG_AAI_HMAC_PRECOMP,
3001 			.rfc3686 = true,
3002 			.geniv = true,
3003 		},
3004 	},
3005 };
3006 
3007 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3008 			    bool uses_dkp)
3009 {
3010 	dma_addr_t dma_addr;
3011 	struct caam_drv_private *priv;
3012 
3013 	ctx->jrdev = caam_jr_alloc();
3014 	if (IS_ERR(ctx->jrdev)) {
3015 		pr_err("Job Ring Device allocation for transform failed\n");
3016 		return PTR_ERR(ctx->jrdev);
3017 	}
3018 
3019 	priv = dev_get_drvdata(ctx->jrdev->parent);
3020 	if (priv->era >= 6 && uses_dkp)
3021 		ctx->dir = DMA_BIDIRECTIONAL;
3022 	else
3023 		ctx->dir = DMA_TO_DEVICE;
3024 
3025 	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3026 					offsetof(struct caam_ctx,
3027 						 sh_desc_enc_dma),
3028 					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3029 	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3030 		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3031 		caam_jr_free(ctx->jrdev);
3032 		return -ENOMEM;
3033 	}
3034 
3035 	ctx->sh_desc_enc_dma = dma_addr;
3036 	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3037 						   sh_desc_dec);
3038 	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3039 
3040 	/* copy descriptor header template value */
3041 	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3042 	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3043 
3044 	return 0;
3045 }
3046 
3047 static int caam_cra_init(struct crypto_skcipher *tfm)
3048 {
3049 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3050 	struct caam_skcipher_alg *caam_alg =
3051 		container_of(alg, typeof(*caam_alg), skcipher);
3052 
3053 	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
3054 				false);
3055 }
3056 
3057 static int caam_aead_init(struct crypto_aead *tfm)
3058 {
3059 	struct aead_alg *alg = crypto_aead_alg(tfm);
3060 	struct caam_aead_alg *caam_alg =
3061 		 container_of(alg, struct caam_aead_alg, aead);
3062 	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3063 
3064 	return caam_init_common(ctx, &caam_alg->caam,
3065 				alg->setkey == aead_setkey);
3066 }
3067 
/*
 * caam_exit_common - tear down what caam_init_common() set up: unmap the
 * single DMA mapping covering the shared descriptors and key (same size
 * and direction as at map time), then release the job ring.
 */
static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}
3075 
/* skcipher ->exit hook: release the per-tfm CAAM context. */
static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);

	caam_exit_common(ctx);
}
3080 
/* AEAD ->exit hook: release the per-tfm CAAM context. */
static void caam_aead_exit(struct crypto_aead *tfm)
{
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	caam_exit_common(ctx);
}
3085 
3086 static void __exit caam_algapi_exit(void)
3087 {
3088 	int i;
3089 
3090 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3091 		struct caam_aead_alg *t_alg = driver_aeads + i;
3092 
3093 		if (t_alg->registered)
3094 			crypto_unregister_aead(&t_alg->aead);
3095 	}
3096 
3097 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3098 		struct caam_skcipher_alg *t_alg = driver_algs + i;
3099 
3100 		if (t_alg->registered)
3101 			crypto_unregister_skcipher(&t_alg->skcipher);
3102 	}
3103 }
3104 
3105 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3106 {
3107 	struct skcipher_alg *alg = &t_alg->skcipher;
3108 
3109 	alg->base.cra_module = THIS_MODULE;
3110 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3111 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3112 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3113 
3114 	alg->init = caam_cra_init;
3115 	alg->exit = caam_cra_exit;
3116 }
3117 
3118 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3119 {
3120 	struct aead_alg *alg = &t_alg->aead;
3121 
3122 	alg->base.cra_module = THIS_MODULE;
3123 	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3124 	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3125 	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3126 
3127 	alg->init = caam_aead_init;
3128 	alg->exit = caam_aead_exit;
3129 }
3130 
3131 static int __init caam_algapi_init(void)
3132 {
3133 	struct device_node *dev_node;
3134 	struct platform_device *pdev;
3135 	struct device *ctrldev;
3136 	struct caam_drv_private *priv;
3137 	int i = 0, err = 0;
3138 	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
3139 	unsigned int md_limit = SHA512_DIGEST_SIZE;
3140 	bool registered = false;
3141 
3142 	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3143 	if (!dev_node) {
3144 		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3145 		if (!dev_node)
3146 			return -ENODEV;
3147 	}
3148 
3149 	pdev = of_find_device_by_node(dev_node);
3150 	if (!pdev) {
3151 		of_node_put(dev_node);
3152 		return -ENODEV;
3153 	}
3154 
3155 	ctrldev = &pdev->dev;
3156 	priv = dev_get_drvdata(ctrldev);
3157 	of_node_put(dev_node);
3158 
3159 	/*
3160 	 * If priv is NULL, it's probably because the caam driver wasn't
3161 	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3162 	 */
3163 	if (!priv)
3164 		return -ENODEV;
3165 
3166 
3167 	/*
3168 	 * Register crypto algorithms the device supports.
3169 	 * First, detect presence and attributes of DES, AES, and MD blocks.
3170 	 */
3171 	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3172 	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3173 	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
3174 	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
3175 	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3176 
3177 	/* If MD is present, limit digest size based on LP256 */
3178 	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
3179 		md_limit = SHA256_DIGEST_SIZE;
3180 
3181 	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3182 		struct caam_skcipher_alg *t_alg = driver_algs + i;
3183 		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
3184 
3185 		/* Skip DES algorithms if not supported by device */
3186 		if (!des_inst &&
3187 		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3188 		     (alg_sel == OP_ALG_ALGSEL_DES)))
3189 				continue;
3190 
3191 		/* Skip AES algorithms if not supported by device */
3192 		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3193 				continue;
3194 
3195 		/*
3196 		 * Check support for AES modes not available
3197 		 * on LP devices.
3198 		 */
3199 		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3200 			if ((t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
3201 			     OP_ALG_AAI_XTS)
3202 				continue;
3203 
3204 		caam_skcipher_alg_init(t_alg);
3205 
3206 		err = crypto_register_skcipher(&t_alg->skcipher);
3207 		if (err) {
3208 			pr_warn("%s alg registration failed\n",
3209 				t_alg->skcipher.base.cra_driver_name);
3210 			continue;
3211 		}
3212 
3213 		t_alg->registered = true;
3214 		registered = true;
3215 	}
3216 
3217 	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3218 		struct caam_aead_alg *t_alg = driver_aeads + i;
3219 		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3220 				 OP_ALG_ALGSEL_MASK;
3221 		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3222 				 OP_ALG_ALGSEL_MASK;
3223 		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3224 
3225 		/* Skip DES algorithms if not supported by device */
3226 		if (!des_inst &&
3227 		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3228 		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3229 				continue;
3230 
3231 		/* Skip AES algorithms if not supported by device */
3232 		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3233 				continue;
3234 
3235 		/*
3236 		 * Check support for AES algorithms not available
3237 		 * on LP devices.
3238 		 */
3239 		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3240 			if (alg_aai == OP_ALG_AAI_GCM)
3241 				continue;
3242 
3243 		/*
3244 		 * Skip algorithms requiring message digests
3245 		 * if MD or MD size is not supported by device.
3246 		 */
3247 		if (c2_alg_sel &&
3248 		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
3249 				continue;
3250 
3251 		caam_aead_alg_init(t_alg);
3252 
3253 		err = crypto_register_aead(&t_alg->aead);
3254 		if (err) {
3255 			pr_warn("%s alg registration failed\n",
3256 				t_alg->aead.base.cra_driver_name);
3257 			continue;
3258 		}
3259 
3260 		t_alg->registered = true;
3261 		registered = true;
3262 	}
3263 
3264 	if (registered)
3265 		pr_info("caam algorithms registered in /proc/crypto\n");
3266 
3267 	return err;
3268 }
3269 
/* Register init/exit hooks and module metadata for the caamalg module */
module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
3276