// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	bool aead;

	__le32 key[8];
	unsigned int key_len;

	/* Everything below is AEAD-specific */
	u32 hash_alg;
	u32 state_sz;
	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated with the request */
	unsigned int rdescs;
	bool needs_inv;
};

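/*
 * Build the processing token for an skcipher request. For CBC modes the IV
 * is copied into the context record in front of the token, and a trailing
 * context-access token is appended so the engine writes the updated IV back
 * into the context, keeping chained requests consistent.
 */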
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    u32 length)
{
	struct safexcel_token *token;
	u32 offset = 0, block_sz = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		switch (ctx->alg) {
		case SAFEXCEL_DES:
			block_sz = DES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_3DES:
			block_sz = DES3_EDE_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
			break;
		case SAFEXCEL_AES:
			block_sz = AES_BLOCK_SIZE;
			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
			break;
		}

		offset = block_sz / sizeof(u32);
		memcpy(cdesc->control_data.token, iv, block_sz);
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = length;
	token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
			EIP197_TOKEN_STAT_LAST_HASH;
	token[0].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		u32 last = (EIP197_MAX_TOKENS - 1) - offset;

		token[last].opcode = EIP197_TOKEN_OPCODE_CTX_ACCESS;
		token[last].packet_length = EIP197_TOKEN_DIRECTION_EXTERNAL |
					    EIP197_TOKEN_EXEC_IF_SUCCESSFUL |
					    EIP197_TOKEN_CTX_OFFSET(0x2);
		token[last].stat = EIP197_TOKEN_STAT_LAST_HASH |
				   EIP197_TOKEN_STAT_LAST_PACKET;
		token[last].instructions =
			EIP197_TOKEN_INS_ORIGIN_LEN(block_sz / sizeof(u32)) |
			EIP197_TOKEN_INS_ORIGIN_IV0;

		/* Store the updated IV values back in the internal context
		 * registers.
		 */
		cdesc->control_data.control1 |= CONTEXT_CONTROL_CRYPTO_STORE;
	}
}

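/*
 * Build the processing token for an AEAD request: hash the associated data
 * only, then cipher and hash the payload, and finally either insert the
 * computed digest (encryption) or retrieve and verify it (decryption).
 */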
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *token;
	unsigned int offset = 0;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
		offset = AES_BLOCK_SIZE / sizeof(u32);
		memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);

		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
	}

	token = (struct safexcel_token *)(cdesc->control_data.token + offset);

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = assoclen;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[1].packet_length = cryptlen;
	token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
	token[1].instructions = EIP197_TOKEN_INS_LAST |
				EIP197_TOKEN_INS_TYPE_CRYTO |
				EIP197_TOKEN_INS_TYPE_HASH |
				EIP197_TOKEN_INS_TYPE_OUTPUT;

	if (direction == SAFEXCEL_ENCRYPT) {
		token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
					EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		token[2].packet_length = digestsize;
		token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

		token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
		token[3].packet_length = digestsize |
					 EIP197_TOKEN_HASH_RESULT_VERIFY;
		token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
				EIP197_TOKEN_STAT_LAST_PACKET;
		token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}
}

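/*
 * Expand and store the AES key. When the engine caches transform records
 * (EIP197_TRC_CACHE) and a context record is already live, a key change
 * flags the record for invalidation before it can be reused.
 */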
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = crypto_aes_expand_key(&aes, key, len);
	if (ret) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

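/*
 * Split an authenc() key into its encryption and authentication halves,
 * precompute the HMAC ipad/opad states for the configured hash, and flag
 * the context record for invalidation if any of them changed.
 */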
static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_ahash_export_state istate, ostate;
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->key))
		goto badkey;

	/* Encryption key */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
		ctx->base.needs_inv = true;

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
					 keys.authkeylen, &istate, &ostate))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
				    CRYPTO_TFM_RES_MASK);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
		ctx->base.needs_inv = true;

	/* Now copy the keys into the context */
	memcpy(ctx->key, keys.enckey, keys.enckeylen);
	ctx->key_len = keys.enckeylen;

	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
	memcpy(ctx->opad, &ostate.state, ctx->state_sz);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

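/*
 * Fill in the control words of the first command descriptor: operation type
 * (cipher only, encrypt-then-hash or hash-then-decrypt), cipher algorithm
 * and key size, HMAC digest selection and the context record size.
 */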
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ctrl_size;

	if (ctx->aead) {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;

		/* The decryption control type is a combination of the
		 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
		 * types.
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
	}

	cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
	cdesc->control_data.control1 |= ctx->mode;

	if (ctx->aead)
		cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
						ctx->hash_alg;

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		switch (ctx->key_len) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len);
			return -EINVAL;
		}
	}

	ctrl_size = ctx->key_len / sizeof(u32);
	if (ctx->aead)
		/* Take into account the ipad+opad digests */
		ctrl_size += ctx->state_sz / sizeof(u32) * 2;
	cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);

	return 0;
}

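/*
 * Collect the result descriptors of a completed cipher request, report the
 * first engine error found and unmap the source/destination scatterlists.
 */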
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sg_nents(src), DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sg_nents(src), DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sg_nents(dst), DMA_FROM_DEVICE);
	}

	*should_complete = true;

	return ndesc;
}

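/*
 * Map the source and destination scatterlists and emit one command
 * descriptor per source segment and one result descriptor per destination
 * segment. The first command descriptor additionally carries the context
 * control words and the token. If a ring fills up, all descriptors written
 * so far are rolled back and the buffers are unmapped.
 */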
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen = cryptlen + assoclen;
	int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
	int i, ret = 0;

	if (src == dst) {
		nr_src = dma_map_sg(priv->dev, src, sg_nents(src),
				    DMA_BIDIRECTIONAL);
		nr_dst = nr_src;
		if (!nr_src)
			return -EINVAL;
	} else {
		nr_src = dma_map_sg(priv->dev, src, sg_nents(src),
				    DMA_TO_DEVICE);
		if (!nr_src)
			return -EINVAL;

		nr_dst = dma_map_sg(priv->dev, dst, sg_nents(dst),
				    DMA_FROM_DEVICE);
		if (!nr_dst) {
			dma_unmap_sg(priv->dev, src, nr_src, DMA_TO_DEVICE);
			return -EINVAL;
		}
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (ctx->aead) {
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       ctx->ipad, ctx->state_sz);
		memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) / sizeof(u32),
		       ctx->opad, ctx->state_sz);
	}

	/* command descriptors */
	for_each_sg(src, sg, nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued - len < 0)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}
		n_cdesc++;

		if (n_cdesc == 1) {
			safexcel_context_control(ctx, base, sreq, cdesc);
			if (ctx->aead)
				safexcel_aead_token(ctx, iv, cdesc,
						    sreq->direction, cryptlen,
						    assoclen, digestsize);
			else
				safexcel_skcipher_token(ctx, iv, cdesc,
							cryptlen);
		}

		queued -= len;
		if (!queued)
			break;
	}

	/* result descriptors */
	for_each_sg(dst, sg, nr_dst, i) {
		bool first = !i, last = sg_is_last(sg);
		u32 len = sg_dma_len(sg);

		rdesc = safexcel_add_rdesc(priv, ring, first, last,
					   sg_dma_address(sg), len);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first)
			first_rdesc = rdesc;
		n_rdesc++;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, nr_dst, DMA_FROM_DEVICE);
	}

	return ret;
}

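/*
 * Handle completion of a context invalidation request. When the transform
 * is being torn down the context record is freed; otherwise the original
 * request, which was deferred while the invalidation ran, is re-enqueued on
 * a freshly selected ring.
 */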
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(async->tfm);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);

		if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
			u32 block_sz = 0;

			switch (ctx->alg) {
			case SAFEXCEL_DES:
				block_sz = DES_BLOCK_SIZE;
				break;
			case SAFEXCEL_3DES:
				block_sz = DES3_EDE_BLOCK_SIZE;
				break;
			case SAFEXCEL_AES:
				block_sz = AES_BLOCK_SIZE;
				break;
			}

			memcpy(req->iv, ctx->base.ctxr->data, block_sz);
		}
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

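/* Emit a single invalidation command for this transform's context record. */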
static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, req->iv,
					commands, results);

	sreq->rdescs = *results;
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	sreq->rdescs = *results;
	return ret;
}

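/*
 * Synchronously invalidate the context record: enqueue a dummy request
 * marked needs_inv and sleep until its completion callback fires.
 */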
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			 "cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

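/*
 * Queue a request on the ring assigned to its transform. A context record
 * is allocated from the DMA pool on first use; if a record already exists
 * and the key changed, the request first triggers an invalidation of the
 * cached record.
 */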
static int safexcel_queue_req(struct crypto_async_request *base,
			struct safexcel_cipher_req *sreq,
			enum safexcel_cipher_direction dir, u32 mode,
			enum safexcel_cipher_alg alg)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;
	ctx->alg = alg;
	ctx->mode = mode;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_AES);
}

static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_AES);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_ecb_aes_encrypt,
		.decrypt = safexcel_ecb_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_AES);
}

static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_AES);
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_cbc_aes_encrypt,
		.decrypt = safexcel_cbc_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_DES);
}

static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_DES);
}

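/*
 * Set the DES key, rejecting weak keys when the transform asks for that,
 * and flag the context record for invalidation on key change.
 */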
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];
	int ret;

	if (len != DES_KEY_SIZE) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ret = des_ekey(tmp, key);
	if (!ret && (tfm->crt_flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	/* If the context exists and the key changed, invalidate it */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_cbc_des_encrypt,
		.decrypt = safexcel_cbc_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_DES);
}

static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_DES);
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_ecb_des_encrypt,
		.decrypt = safexcel_ecb_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_3DES);
}

static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
			SAFEXCEL_3DES);
}

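/*
 * Set the triple-DES key after checking that it does not degenerate into
 * single DES, and flag the context record for invalidation on key change.
 */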
static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				    const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int err;

	err = des3_verify_key(ctfm, key);
	if (unlikely(err))
		return err;

	/* If the context exists and the key changed, invalidate it */
	if (ctx->base.ctxr_dma) {
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
	}

	memcpy(ctx->key, key, len);

	ctx->key_len = len;

	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_cbc_des3_ede_encrypt,
		.decrypt = safexcel_cbc_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_3DES);
}

static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
			SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
			SAFEXCEL_3DES);
}

struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_ecb_des3_ede_encrypt,
		.decrypt = safexcel_ecb_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
			CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
}

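/*
 * Common AEAD transform init; the per-hash init helpers below only set the
 * hash algorithm and its state size on top of this.
 */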
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}

static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};