// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/sha.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	bool aead;

	__le32 key[8];
	u32 nonce;
	unsigned int key_len;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;
	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated with the request */
	unsigned int rdescs;
	bool needs_inv;
	int nr_src, nr_dst;
};

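/*
 * Program the IV into the token area of a command descriptor. For CTR
 * "load" mode the token holds the 32 bit nonce, a 64 bit IV part and a
 * counter starting at 1; for the other non-ECB modes the full cipher
 * block sized IV is copied. ECB needs no IV, so nothing is written then.
 */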
static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				  struct safexcel_command_desc *cdesc)
{
	u32 block_sz = 0;

	if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
		switch (ctx->alg) {
		case SAFEXCEL_DES:
			block_sz = DES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_3DES:
			block_sz = DES3_EDE_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_AES:
			block_sz = AES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		}

		if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
			/* 32 bit nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			/* 32 bit counter, start at 1 (big endian!) */
			cdesc->control_data.token[3] = cpu_to_be32(1);
		} else {
			memcpy(cdesc->control_data.token, iv, block_sz);
		}
	}
}

static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    u32 length)
{
	struct safexcel_token *token;

	safexcel_cipher_token(ctx, iv, cdesc);

	/* skip over worst case IV of 4 dwords, no need to be exact */
	token = (struct safexcel_token *)(cdesc->control_data.token + 4);

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = length;
	token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
			EIP197_TOKEN_STAT_LAST_HASH;
	token[0].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYPTO |
				EIP197_TOKEN_INS_TYPE_OUTPUT;
}

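/*
 * Build the AEAD token. The digest handling instructions are aligned to
 * the end of the token area: encrypt inserts the computed digest into the
 * output stream, while decrypt retrieves the digest from the input and
 * verifies it. The data instructions (AAD hashed only, payload run through
 * both cipher and hash) are then filled in from the start of the token.
 */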
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;

	safexcel_cipher_token(ctx, iv, cdesc);

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (direction == SAFEXCEL_ENCRYPT) {
		/* align end of instruction sequence to end of token */
		token = (struct safexcel_token *)(cdesc->control_data.token +
			 EIP197_MAX_TOKENS - 3);

		token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* align end of instruction sequence to end of token */
		token = (struct safexcel_token *)(cdesc->control_data.token +
			 EIP197_MAX_TOKENS - 4);

		token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[3].packet_length = digestsize |
					 EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	if (unlikely(!cryptlen)) {
		token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		token[1].packet_length = assoclen;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_LAST |
					EIP197_TOKEN_INS_TYPE_HASH;
	} else {
		if (likely(assoclen)) {
			token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
			token[0].packet_length = assoclen;
			token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
		}

		token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		token[1].packet_length = cryptlen;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_LAST |
					EIP197_TOKEN_INS_TYPE_CRYPTO |
					EIP197_TOKEN_INS_TYPE_HASH |
					EIP197_TOKEN_INS_TYPE_OUTPUT;
	}
}

static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
				unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;
	struct crypto_aes_ctx aes;
	int err = -EINVAL;

	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
		goto badkey;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		/* Minimum keysize is minimum AES key size + nonce size */
		if (keys.enckeylen < (AES_MIN_KEY_SIZE +
				      CTR_RFC3686_NONCE_SIZE))
			goto badkey;
		/* last 4 bytes of key are the nonce! */
		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
				      CTR_RFC3686_NONCE_SIZE);
		/* exclude the nonce here */
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}

	/* Encryption key */
	switch (ctx->alg) {
	case SAFEXCEL_3DES:
		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey_expflags;
		break;
	case SAFEXCEL_AES:
		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
		goto badkey;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
		ctx->base.needs_inv = true;

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
				    CRYPTO_TFM_RES_MASK);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	memcpy(ctx->key, keys.enckey, keys.enckeylen);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
badkey_expflags:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

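/*
 * Fill in the context control words of the first command descriptor:
 * operation type (encrypt/decrypt, with or without hashing), key enable,
 * cipher mode and algorithm, and the size of the context record (the key
 * plus, for AEAD, the ipad and opad digests).
 */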
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ctrl_size;

	if (ctx->aead) {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;

		/* The decryption control type is a combination of the
		 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
		 * types.
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
	}

	cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
	cdesc->control_data.control1 |= ctx->mode;

	if (ctx->aead)
		cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
						ctx->hash_alg;

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		switch (ctx->key_len) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len);
			return -EINVAL;
		}
	}

	ctrl_size = ctx->key_len / sizeof(u32);
	if (ctx->aead)
		/* Take into account the ipad+opad digests */
		ctrl_size += ctx->state_sz / sizeof(u32) * 2;
	cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);

	return 0;
}

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}

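/*
 * Build the descriptor chains for one request: one command descriptor per
 * source scatterlist entry and one result descriptor per destination
 * entry, skipping the AAD area which the engine does not write back. The
 * first command descriptor also carries the context control words and the
 * token built above.
 */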
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) /
		       sizeof(u32),
		       ctx->opad, ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of in-place
		 * operation as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember the actual input length; the source buffer length may be
	 * updated below in case of an in-place operation.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
		    (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	/* The EIP cannot deal with zero length input packets! */
	if (totlen == 0)
		totlen = 1;

	/* command descriptors */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued - len < 0)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;

		if (n_cdesc == 1)
			first_cdesc = cdesc;

		queued -= len;
		if (!queued)
			break;
	}

	if (unlikely(!n_cdesc)) {
		/*
		 * Special case: zero length input buffer.
		 * The engine always needs the 1st command descriptor, however!
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring, 1, 1, 0, 0, totlen,
						 ctx->base.ctxr_dma);
		n_cdesc = 1;
	}

	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc,
					cryptlen);

	/* result descriptors */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	return ret;
}

static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

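/* Queue a single command invalidating the transform's context record */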
static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv) {
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	} else {
		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
		u8 input_iv[AES_BLOCK_SIZE];

		/*
		 * Save input IV in case of CBC decrypt mode
		 * Will be overwritten with output IV prior to use!
		 */
		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, input_iv,
					commands, results);
	}

	sreq->rdescs = *results;
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	sreq->rdescs = *results;
	return ret;
}

static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			"cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

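/*
 * Common enqueue path: allocate the DMA context record on first use, flag
 * an invalidation if a cached context record went stale, then queue the
 * request on the selected ring and kick that ring's worker.
 */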
static int safexcel_queue_req(struct crypto_async_request *base,
			struct safexcel_cipher_req *sreq,
			enum safexcel_cipher_direction dir, u32 mode,
			enum safexcel_cipher_alg alg)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;
	ctx->alg = alg;
	ctx->mode = mode;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_AES);
}

static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_AES);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_ecb_aes_encrypt,
		.decrypt = safexcel_ecb_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_AES);
}

static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_AES);
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_cbc_aes_encrypt,
		.decrypt = safexcel_cbc_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ctr_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD,
			SAFEXCEL_AES);
}

static int safexcel_ctr_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD,
			SAFEXCEL_AES);
}

static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	keylen = len - CTR_RFC3686_NONCE_SIZE;
	ret = aes_expandkey(&aes, key, keylen);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_ctr_aes_encrypt,
		.decrypt = safexcel_ctr_aes_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_DES);
}

static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_DES);
}

static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_cbc_des_encrypt,
		.decrypt = safexcel_cbc_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_DES);
}

static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_DES);
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_ecb_des_encrypt,
		.decrypt = safexcel_ecb_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_3DES);
}

static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_3DES);
}

static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma) {
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
	}

	memcpy(ctx->key, key, len);

	ctx->key_len = len;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_cbc_des3_ede_encrypt,
		.decrypt = safexcel_cbc_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_3DES);
}

static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_3DES);
}

struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_ecb_des3_ede_encrypt,
		.decrypt = safexcel_ecb_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_encrypt_aes(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}

static int safexcel_aead_decrypt_aes(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}

static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->alg  = SAFEXCEL_AES; /* default */
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}

static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt_aes,
		.decrypt = safexcel_aead_decrypt_aes,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt_aes,
		.decrypt = safexcel_aead_decrypt_aes,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt_aes,
		.decrypt = safexcel_aead_decrypt_aes,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt_aes,
		.decrypt = safexcel_aead_decrypt_aes,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt_aes,
		.decrypt = safexcel_aead_decrypt_aes,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	return 0;
}

static int safexcel_aead_encrypt_3des(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_3DES);
}

static int safexcel_aead_decrypt_3des(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_3DES);
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt_3des,
		.decrypt = safexcel_aead_decrypt_3des,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
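
/*
 * CTR mode must be passed down on every request: safexcel_queue_req()
 * (re)writes ctx->mode, so reusing the CBC wrappers above would silently
 * switch the rfc3686 transforms below back to CBC mode. These wrappers
 * mirror the 3DES ones and keep the CTR templates in the right mode.
 */
static int safexcel_aead_ctraes_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD, SAFEXCEL_AES);
}

static int safexcel_aead_ctraes_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD, SAFEXCEL_AES);
}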

static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_ctraes_encrypt,
		.decrypt = safexcel_aead_ctraes_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_ctraes_encrypt,
		.decrypt = safexcel_aead_ctraes_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_ctraes_encrypt,
		.decrypt = safexcel_aead_ctraes_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_ctraes_encrypt,
		.decrypt = safexcel_aead_ctraes_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_ctraes_encrypt,
		.decrypt = safexcel_aead_ctraes_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};