1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2017 Marvell
4  *
5  * Antoine Tenart <antoine.tenart@free-electrons.com>
6  */
7 
8 #include <crypto/aes.h>
9 #include <crypto/hmac.h>
10 #include <crypto/md5.h>
11 #include <crypto/sha.h>
12 #include <crypto/sha3.h>
13 #include <crypto/skcipher.h>
14 #include <crypto/sm3.h>
15 #include <linux/device.h>
16 #include <linux/dma-mapping.h>
17 #include <linux/dmapool.h>
18 
19 #include "safexcel.h"
20 
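/*
 * Per-transform (tfm) context: selected algorithm, precomputed HMAC
 * ipad/opad blocks (also used to hold AES-XCBC/CMAC subkey material and
 * the CRC32 seed), and the helper cipher/ahash/shash transforms used by
 * some algorithms.
 */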
21 struct safexcel_ahash_ctx {
22 	struct safexcel_context base;
23 	struct safexcel_crypto_priv *priv;
24 
25 	u32 alg;
26 	u8  key_sz;
27 	bool cbcmac;
28 	bool do_fallback;
29 	bool fb_init_done;
30 	bool fb_do_setkey;
31 
32 	__le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
33 	__le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
34 
35 	struct crypto_cipher *kaes;
36 	struct crypto_ahash *fback;
37 	struct crypto_shash *shpre;
38 	struct shash_desc *shdesc;
39 };
40 
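/*
 * Per-request state: request/processed byte counters, the intermediate
 * digest (state), DMA handles for the result and cache buffers, and a
 * block-sized cache holding data not yet submitted to the engine.
 */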
41 struct safexcel_ahash_req {
42 	bool last_req;
43 	bool finish;
44 	bool hmac;
45 	bool needs_inv;
46 	bool hmac_zlen;
47 	bool len_is_le;
48 	bool not_first;
49 	bool xcbcmac;
50 
51 	int nents;
52 	dma_addr_t result_dma;
53 
54 	u32 digest;
55 
56 	u8 state_sz;    /* expected state size, only set once */
57 	u8 block_sz;    /* block size, only set once */
58 	u8 digest_sz;   /* output digest size, only set once */
59 	__le32 state[SHA3_512_BLOCK_SIZE /
60 		     sizeof(__le32)] __aligned(sizeof(__le32));
61 
62 	u64 len;
63 	u64 processed;
64 
65 	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
66 	dma_addr_t cache_dma;
67 	unsigned int cache_sz;
68 
69 	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
70 };
71 
72 static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
73 {
74 	return req->len - req->processed;
75 }
76 
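/*
 * Build the EIP197 instruction token: token 0 hashes the input data,
 * token 1 (CBC-MAC only) pads a final partial block out to 16 bytes,
 * and token 2 inserts the resulting digest into the output packet.
 */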
77 static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
78 				u32 input_length, u32 result_length,
79 				bool cbcmac)
80 {
81 	struct safexcel_token *token =
82 		(struct safexcel_token *)cdesc->control_data.token;
83 
84 	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
85 	token[0].packet_length = input_length;
86 	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
87 
88 	input_length &= 15;
89 	if (unlikely(cbcmac && input_length)) {
90 		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
91 		token[1].packet_length = 16 - input_length;
92 		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
93 		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
94 	} else {
95 		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
96 	}
97 
98 	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
99 	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
100 			EIP197_TOKEN_STAT_LAST_PACKET;
101 	token[2].packet_length = result_length;
102 	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
103 				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
104 }
105 
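/*
 * Fill in the control words of the first command descriptor: select the
 * algorithm, load the intermediate digest (or XCBC key material) into the
 * context record and, for finish operations, add the digest count or the
 * outer HMAC digest as required.
 */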
106 static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
107 				     struct safexcel_ahash_req *req,
108 				     struct safexcel_command_desc *cdesc)
109 {
110 	struct safexcel_crypto_priv *priv = ctx->priv;
111 	u64 count = 0;
112 
113 	cdesc->control_data.control0 = ctx->alg;
114 
115 	/*
116 	 * Copy the input digest if needed, and setup the context
117 	 * fields. Do this now as we need it to setup the first command
118 	 * descriptor.
119 	 */
120 	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
121 		if (req->xcbcmac)
122 			memcpy(ctx->base.ctxr->data, ctx->ipad, ctx->key_sz);
123 		else
124 			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
125 
126 		if (!req->finish && req->xcbcmac)
127 			cdesc->control_data.control0 |=
128 				CONTEXT_CONTROL_DIGEST_XCM |
129 				CONTEXT_CONTROL_TYPE_HASH_OUT  |
130 				CONTEXT_CONTROL_NO_FINISH_HASH |
131 				CONTEXT_CONTROL_SIZE(req->state_sz /
132 						     sizeof(u32));
133 		else
134 			cdesc->control_data.control0 |=
135 				CONTEXT_CONTROL_DIGEST_XCM |
136 				CONTEXT_CONTROL_TYPE_HASH_OUT  |
137 				CONTEXT_CONTROL_SIZE(req->state_sz /
138 						     sizeof(u32));
139 		return;
140 	} else if (!req->processed) {
141 		/* First - and possibly only - block of basic hash only */
142 		if (req->finish)
143 			cdesc->control_data.control0 |= req->digest |
144 				CONTEXT_CONTROL_TYPE_HASH_OUT |
145 				CONTEXT_CONTROL_RESTART_HASH  |
146 				/* ensure it's not 0! */
147 				CONTEXT_CONTROL_SIZE(1);
148 		else
149 			cdesc->control_data.control0 |= req->digest |
150 				CONTEXT_CONTROL_TYPE_HASH_OUT  |
151 				CONTEXT_CONTROL_RESTART_HASH   |
152 				CONTEXT_CONTROL_NO_FINISH_HASH |
153 				/* ensure it's not 0! */
154 				CONTEXT_CONTROL_SIZE(1);
155 		return;
156 	}
157 
158 	/* Hash continuation or HMAC, setup (inner) digest from state */
159 	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
160 
161 	if (req->finish) {
162 		/* Compute digest count for hash/HMAC finish operations */
163 		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
164 		    req->hmac_zlen || (req->processed != req->block_sz)) {
165 			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;
166 
167 			/* This is a hardware limitation, as the
168 			 * counter must fit into a u32. This represents
169 			 * a fairly big amount of input data, so we
170 			 * shouldn't see this.
171 			 */
172 			if (unlikely(count & 0xffffffff00000000ULL)) {
173 				dev_warn(priv->dev,
174 					 "Input data is too big\n");
175 				return;
176 			}
177 		}
178 
179 		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
180 		    /* Special case: zero length HMAC */
181 		    req->hmac_zlen ||
182 		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
183 		    (req->processed != req->block_sz)) {
184 			/* Basic hash continue operation, need digest + cnt */
185 			cdesc->control_data.control0 |=
186 				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
187 				CONTEXT_CONTROL_TYPE_HASH_OUT |
188 				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
189 			/* For zero-len HMAC, don't finalize, already padded! */
190 			if (req->hmac_zlen)
191 				cdesc->control_data.control0 |=
192 					CONTEXT_CONTROL_NO_FINISH_HASH;
193 			cdesc->control_data.control1 |=
194 				CONTEXT_CONTROL_DIGEST_CNT;
195 			ctx->base.ctxr->data[req->state_sz >> 2] =
196 				cpu_to_le32(count);
197 			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
198 
199 			/* Clear zero-length HMAC flag for next operation! */
200 			req->hmac_zlen = false;
201 		} else { /* HMAC */
202 			/* Need outer digest for HMAC finalization */
203 			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
204 			       ctx->opad, req->state_sz);
205 
206 			/* Single pass HMAC - no digest count */
207 			cdesc->control_data.control0 |=
208 				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
209 				CONTEXT_CONTROL_TYPE_HASH_OUT |
210 				CONTEXT_CONTROL_DIGEST_HMAC;
211 		}
212 	} else { /* Hash continuation, do not finish yet */
213 		cdesc->control_data.control0 |=
214 			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
215 			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
216 			CONTEXT_CONTROL_TYPE_HASH_OUT |
217 			CONTEXT_CONTROL_NO_FINISH_HASH;
218 	}
219 }
220 
221 static int safexcel_ahash_enqueue(struct ahash_request *areq);
222 
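/*
 * Handle the result of a regular hash request: unmap the source, result
 * and cache buffers, copy the digest into the request result (undoing the
 * final CRC32 XOR, or launching the outer hash when HMAC is being faked
 * with two plain hashes), and keep any still-queued bytes cached.
 */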
223 static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
224 				      int ring,
225 				      struct crypto_async_request *async,
226 				      bool *should_complete, int *ret)
227 {
228 	struct safexcel_result_desc *rdesc;
229 	struct ahash_request *areq = ahash_request_cast(async);
230 	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
231 	struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
232 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
233 	u64 cache_len;
234 
235 	*ret = 0;
236 
237 	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
238 	if (IS_ERR(rdesc)) {
239 		dev_err(priv->dev,
240 			"hash: result: could not retrieve the result descriptor\n");
241 		*ret = PTR_ERR(rdesc);
242 	} else {
243 		*ret = safexcel_rdesc_check_errors(priv, rdesc);
244 	}
245 
246 	safexcel_complete(priv, ring);
247 
248 	if (sreq->nents) {
249 		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
250 		sreq->nents = 0;
251 	}
252 
253 	if (sreq->result_dma) {
254 		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
255 				 DMA_FROM_DEVICE);
256 		sreq->result_dma = 0;
257 	}
258 
259 	if (sreq->cache_dma) {
260 		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
261 				 DMA_TO_DEVICE);
262 		sreq->cache_dma = 0;
263 		sreq->cache_sz = 0;
264 	}
265 
266 	if (sreq->finish) {
267 		if (sreq->hmac &&
268 		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
269 			/* Faking HMAC using hash - need to do outer hash */
270 			memcpy(sreq->cache, sreq->state,
271 			       crypto_ahash_digestsize(ahash));
272 
273 			memcpy(sreq->state, ctx->opad, sreq->digest_sz);
274 
275 			sreq->len = sreq->block_sz +
276 				    crypto_ahash_digestsize(ahash);
277 			sreq->processed = sreq->block_sz;
278 			sreq->hmac = 0;
279 
280 			ctx->base.needs_inv = true;
281 			areq->nbytes = 0;
282 			safexcel_ahash_enqueue(areq);
283 
284 			*should_complete = false; /* Not done yet */
285 			return 1;
286 		}
287 
288 		if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
289 			     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
290 			/* Undo the final XOR with 0xffffffff ... */
291 			*(__le32 *)areq->result = ~sreq->state[0];
292 		} else {
293 			memcpy(areq->result, sreq->state,
294 			       crypto_ahash_digestsize(ahash));
295 		}
296 	}
297 
298 	cache_len = safexcel_queued_len(sreq);
299 	if (cache_len)
300 		memcpy(sreq->cache, sreq->cache_next, cache_len);
301 
302 	*should_complete = true;
303 
304 	return 1;
305 }
306 
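/*
 * Build the descriptors for a hash request: first a command descriptor
 * for any cached data, then one per source scatterlist entry, followed by
 * a single result descriptor for the digest. Data that does not fill a
 * whole block is kept back in the cache for the next call.
 */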
307 static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
308 				   int *commands, int *results)
309 {
310 	struct ahash_request *areq = ahash_request_cast(async);
311 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
312 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
313 	struct safexcel_crypto_priv *priv = ctx->priv;
314 	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
315 	struct safexcel_result_desc *rdesc;
316 	struct scatterlist *sg;
317 	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
318 	u64 queued, len;
319 
320 	queued = safexcel_queued_len(req);
321 	if (queued <= HASH_CACHE_SIZE)
322 		cache_len = queued;
323 	else
324 		cache_len = queued - areq->nbytes;
325 
326 	if (!req->finish && !req->last_req) {
327 		/* If this is not the last request and the queued data does not
328 		 * fit into full cache blocks, cache it for the next send call.
329 		 */
330 		extra = queued & (HASH_CACHE_SIZE - 1);
331 
332 		/* If this is not the last request and the queued data
333 		 * is a multiple of a block, cache the last one for now.
334 		 */
335 		if (!extra)
336 			extra = HASH_CACHE_SIZE;
337 
338 		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
339 				   req->cache_next, extra,
340 				   areq->nbytes - extra);
341 
342 		queued -= extra;
343 
344 		if (!queued) {
345 			*commands = 0;
346 			*results = 0;
347 			return 0;
348 		}
349 
350 		extra = 0;
351 	}
352 
353 	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
354 		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
355 			/*
356 			 * Cache holds less than 1 full block: pad it out to a full block.
357 			 */
358 			extra = AES_BLOCK_SIZE - cache_len;
359 			if (queued > cache_len) {
360 				/* More data follows: borrow bytes */
361 				u64 tmp = queued - cache_len;
362 
363 				skip = min_t(u64, tmp, extra);
364 				sg_pcopy_to_buffer(areq->src,
365 					sg_nents(areq->src),
366 					req->cache + cache_len,
367 					skip, 0);
368 			}
369 			extra -= skip;
370 			memset(req->cache + cache_len + skip, 0, extra);
371 			if (!ctx->cbcmac && extra) {
372 				// 10* padding (0x80, then zeroes) for XCBC-MAC & CMAC
373 				req->cache[cache_len + skip] = 0x80;
374 				// HW will use K2 instead of K3 - compensate!
375 				for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
376 					((__be32 *)req->cache)[i] ^=
377 					  cpu_to_be32(le32_to_cpu(
378 					    ctx->ipad[i] ^ ctx->ipad[i + 4]));
379 			}
380 			cache_len = AES_BLOCK_SIZE;
381 			queued = queued + extra;
382 		}
383 
384 		/* XCBC continue: XOR previous result into 1st word */
385 		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
386 	}
387 
388 	len = queued;
389 	/* Add a command descriptor for the cached data, if any */
390 	if (cache_len) {
391 		req->cache_dma = dma_map_single(priv->dev, req->cache,
392 						cache_len, DMA_TO_DEVICE);
393 		if (dma_mapping_error(priv->dev, req->cache_dma))
394 			return -EINVAL;
395 
396 		req->cache_sz = cache_len;
397 		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
398 						 (cache_len == len),
399 						 req->cache_dma, cache_len,
400 						 len, ctx->base.ctxr_dma);
401 		if (IS_ERR(first_cdesc)) {
402 			ret = PTR_ERR(first_cdesc);
403 			goto unmap_cache;
404 		}
405 		n_cdesc++;
406 
407 		queued -= cache_len;
408 		if (!queued)
409 			goto send_command;
410 	}
411 
412 	/* Now handle the current ahash request buffer(s) */
413 	req->nents = dma_map_sg(priv->dev, areq->src,
414 				sg_nents_for_len(areq->src,
415 						 areq->nbytes),
416 				DMA_TO_DEVICE);
417 	if (!req->nents) {
418 		ret = -ENOMEM;
419 		goto cdesc_rollback;
420 	}
421 
422 	for_each_sg(areq->src, sg, req->nents, i) {
423 		int sglen = sg_dma_len(sg);
424 
425 		if (unlikely(sglen <= skip)) {
426 			skip -= sglen;
427 			continue;
428 		}
429 
430 		/* Do not overflow the request */
431 		if ((queued + skip) <= sglen)
432 			sglen = queued;
433 		else
434 			sglen -= skip;
435 
436 		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
437 					   !(queued - sglen),
438 					   sg_dma_address(sg) + skip, sglen,
439 					   len, ctx->base.ctxr_dma);
440 		if (IS_ERR(cdesc)) {
441 			ret = PTR_ERR(cdesc);
442 			goto unmap_sg;
443 		}
444 
445 		if (!n_cdesc)
446 			first_cdesc = cdesc;
447 		n_cdesc++;
448 
449 		queued -= sglen;
450 		if (!queued)
451 			break;
452 		skip = 0;
453 	}
454 
455 send_command:
456 	/* Setup the context options */
457 	safexcel_context_control(ctx, req, first_cdesc);
458 
459 	/* Add the token */
460 	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
461 
462 	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
463 					 DMA_FROM_DEVICE);
464 	if (dma_mapping_error(priv->dev, req->result_dma)) {
465 		ret = -EINVAL;
466 		goto unmap_sg;
467 	}
468 
469 	/* Add a result descriptor */
470 	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
471 				   req->digest_sz);
472 	if (IS_ERR(rdesc)) {
473 		ret = PTR_ERR(rdesc);
474 		goto unmap_result;
475 	}
476 
477 	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
478 
479 	req->processed += len - extra;
480 
481 	*commands = n_cdesc;
482 	*results = 1;
483 	return 0;
484 
485 unmap_result:
486 	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
487 			 DMA_FROM_DEVICE);
488 unmap_sg:
489 	if (req->nents) {
490 		dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
491 		req->nents = 0;
492 	}
493 cdesc_rollback:
494 	for (i = 0; i < n_cdesc; i++)
495 		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
496 unmap_cache:
497 	if (req->cache_dma) {
498 		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
499 				 DMA_TO_DEVICE);
500 		req->cache_dma = 0;
501 		req->cache_sz = 0;
502 	}
503 
504 	return ret;
505 }
506 
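/*
 * Handle the result of a context-invalidation request: free the context
 * record if the transform is being torn down, otherwise move the request
 * to a freshly selected ring and re-queue it.
 */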
507 static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
508 				      int ring,
509 				      struct crypto_async_request *async,
510 				      bool *should_complete, int *ret)
511 {
512 	struct safexcel_result_desc *rdesc;
513 	struct ahash_request *areq = ahash_request_cast(async);
514 	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
515 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
516 	int enq_ret;
517 
518 	*ret = 0;
519 
520 	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
521 	if (IS_ERR(rdesc)) {
522 		dev_err(priv->dev,
523 			"hash: invalidate: could not retrieve the result descriptor\n");
524 		*ret = PTR_ERR(rdesc);
525 	} else {
526 		*ret = safexcel_rdesc_check_errors(priv, rdesc);
527 	}
528 
529 	safexcel_complete(priv, ring);
530 
531 	if (ctx->base.exit_inv) {
532 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
533 			      ctx->base.ctxr_dma);
534 
535 		*should_complete = true;
536 		return 1;
537 	}
538 
539 	ring = safexcel_select_ring(priv);
540 	ctx->base.ring = ring;
541 
542 	spin_lock_bh(&priv->ring[ring].queue_lock);
543 	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
544 	spin_unlock_bh(&priv->ring[ring].queue_lock);
545 
546 	if (enq_ret != -EINPROGRESS)
547 		*ret = enq_ret;
548 
549 	queue_work(priv->ring[ring].workqueue,
550 		   &priv->ring[ring].work_data.work);
551 
552 	*should_complete = false;
553 
554 	return 1;
555 }
556 
557 static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
558 				  struct crypto_async_request *async,
559 				  bool *should_complete, int *ret)
560 {
561 	struct ahash_request *areq = ahash_request_cast(async);
562 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
563 	int err;
564 
565 	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);
566 
567 	if (req->needs_inv) {
568 		req->needs_inv = false;
569 		err = safexcel_handle_inv_result(priv, ring, async,
570 						 should_complete, ret);
571 	} else {
572 		err = safexcel_handle_req_result(priv, ring, async,
573 						 should_complete, ret);
574 	}
575 
576 	return err;
577 }
578 
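/* Queue a single invalidation command for this context */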
579 static int safexcel_ahash_send_inv(struct crypto_async_request *async,
580 				   int ring, int *commands, int *results)
581 {
582 	struct ahash_request *areq = ahash_request_cast(async);
583 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
584 	int ret;
585 
586 	ret = safexcel_invalidate_cache(async, ctx->priv,
587 					ctx->base.ctxr_dma, ring);
588 	if (unlikely(ret))
589 		return ret;
590 
591 	*commands = 1;
592 	*results = 1;
593 
594 	return 0;
595 }
596 
597 static int safexcel_ahash_send(struct crypto_async_request *async,
598 			       int ring, int *commands, int *results)
599 {
600 	struct ahash_request *areq = ahash_request_cast(async);
601 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
602 	int ret;
603 
604 	if (req->needs_inv)
605 		ret = safexcel_ahash_send_inv(async, ring, commands, results);
606 	else
607 		ret = safexcel_ahash_send_req(async, ring, commands, results);
608 
609 	return ret;
610 }
611 
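/*
 * Send a synchronous invalidation request for this context and wait for
 * it to complete; used on exit while the engine's record cache may still
 * hold the context.
 */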
612 static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
613 {
614 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
615 	struct safexcel_crypto_priv *priv = ctx->priv;
616 	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
617 	struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
618 	struct safexcel_inv_result result = {};
619 	int ring = ctx->base.ring;
620 
621 	memset(req, 0, EIP197_AHASH_REQ_SIZE);
622 
623 	/* create invalidation request */
624 	init_completion(&result.completion);
625 	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
626 				   safexcel_inv_complete, &result);
627 
628 	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
629 	ctx = crypto_tfm_ctx(req->base.tfm);
630 	ctx->base.exit_inv = true;
631 	rctx->needs_inv = true;
632 
633 	spin_lock_bh(&priv->ring[ring].queue_lock);
634 	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
635 	spin_unlock_bh(&priv->ring[ring].queue_lock);
636 
637 	queue_work(priv->ring[ring].workqueue,
638 		   &priv->ring[ring].work_data.work);
639 
640 	wait_for_completion(&result.completion);
641 
642 	if (result.error) {
643 		dev_warn(priv->dev, "hash: completion error (%d)\n",
644 			 result.error);
645 		return result.error;
646 	}
647 
648 	return 0;
649 }
650 
651 /* safexcel_ahash_cache: cache data until at least one request can be sent to
652  * the engine, i.e. when there is at least one full block of data in the pipe.
653  */
654 static int safexcel_ahash_cache(struct ahash_request *areq)
655 {
656 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
657 	u64 cache_len;
658 
659 	/* cache_len: everything accepted by the driver but not sent yet, i.e. total
660 	 * size passed to update() - current request size - size handled by send()
661 	 */
662 	cache_len = safexcel_queued_len(req);
663 
664 	/*
665 	 * If there aren't enough bytes to proceed (less than a
666 	 * block size), cache the data until we have enough.
667 	 */
668 	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
669 		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
670 				   req->cache + cache_len,
671 				   areq->nbytes, 0);
672 		return 0;
673 	}
674 
675 	/* We couldn't cache all the data */
676 	return -E2BIG;
677 }
678 
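/*
 * Queue a request on its ring: allocate the DMA context record on first
 * use and request an invalidation first whenever the cached engine
 * context (digest, outer digest or continuation state) may have changed.
 */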
679 static int safexcel_ahash_enqueue(struct ahash_request *areq)
680 {
681 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
682 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
683 	struct safexcel_crypto_priv *priv = ctx->priv;
684 	int ret, ring;
685 
686 	req->needs_inv = false;
687 
688 	if (ctx->base.ctxr) {
689 		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
690 		     /* invalidate for *any* non-XCBC continuation */
691 		   ((req->not_first && !req->xcbcmac) ||
692 		     /* invalidate if (i)digest changed */
693 		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
694 		     /* invalidate for HMAC finish with odigest changed */
695 		     (req->finish && req->hmac &&
696 		      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
697 			     ctx->opad, req->state_sz))))
698 			/*
699 			 * We're still setting needs_inv here, even though it is
700 			 * cleared right away, because the needs_inv flag can be
701 			 * set in other functions and we want to keep the same
702 			 * logic.
703 			 */
704 			ctx->base.needs_inv = true;
705 
706 		if (ctx->base.needs_inv) {
707 			ctx->base.needs_inv = false;
708 			req->needs_inv = true;
709 		}
710 	} else {
711 		ctx->base.ring = safexcel_select_ring(priv);
712 		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
713 						 EIP197_GFP_FLAGS(areq->base),
714 						 &ctx->base.ctxr_dma);
715 		if (!ctx->base.ctxr)
716 			return -ENOMEM;
717 	}
718 	req->not_first = true;
719 
720 	ring = ctx->base.ring;
721 
722 	spin_lock_bh(&priv->ring[ring].queue_lock);
723 	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
724 	spin_unlock_bh(&priv->ring[ring].queue_lock);
725 
726 	queue_work(priv->ring[ring].workqueue,
727 		   &priv->ring[ring].work_data.work);
728 
729 	return ret;
730 }
731 
732 static int safexcel_ahash_update(struct ahash_request *areq)
733 {
734 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
735 	int ret;
736 
737 	/* If the request is 0 length, do nothing */
738 	if (!areq->nbytes)
739 		return 0;
740 
741 	/* Add request to the cache if it fits */
742 	ret = safexcel_ahash_cache(areq);
743 
744 	/* Update total request length */
745 	req->len += areq->nbytes;
746 
747 	/* If not all data could fit into the cache, go process the excess.
748 	 * Also go process immediately for an HMAC IV precompute, which
749 	 * will never be finished at all, but needs to be processed anyway.
750 	 */
751 	if ((ret && !req->finish) || req->last_req)
752 		return safexcel_ahash_enqueue(areq);
753 
754 	return 0;
755 }
756 
757 static int safexcel_ahash_final(struct ahash_request *areq)
758 {
759 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
760 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
761 
762 	req->finish = true;
763 
764 	if (unlikely(!req->len && !areq->nbytes)) {
765 		/*
766 		 * If we have an overall zero-length *hash* request:
767 		 * the HW cannot hash zero-length input, so we provide the
768 		 * correct result directly here.
769 		 */
770 		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
771 			memcpy(areq->result, md5_zero_message_hash,
772 			       MD5_DIGEST_SIZE);
773 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
774 			memcpy(areq->result, sha1_zero_message_hash,
775 			       SHA1_DIGEST_SIZE);
776 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
777 			memcpy(areq->result, sha224_zero_message_hash,
778 			       SHA224_DIGEST_SIZE);
779 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
780 			memcpy(areq->result, sha256_zero_message_hash,
781 			       SHA256_DIGEST_SIZE);
782 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
783 			memcpy(areq->result, sha384_zero_message_hash,
784 			       SHA384_DIGEST_SIZE);
785 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
786 			memcpy(areq->result, sha512_zero_message_hash,
787 			       SHA512_DIGEST_SIZE);
788 		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
789 			memcpy(areq->result,
790 			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
791 		}
792 
793 		return 0;
794 	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
795 			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
796 			    req->len == sizeof(u32) && !areq->nbytes)) {
797 		/* Zero length CRC32 */
798 		memcpy(areq->result, ctx->ipad, sizeof(u32));
799 		return 0;
800 	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
801 			    !areq->nbytes)) {
802 		/* Zero length CBC MAC */
803 		memset(areq->result, 0, AES_BLOCK_SIZE);
804 		return 0;
805 	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
806 			    !areq->nbytes)) {
807 		/* Zero length (X)CBC/CMAC */
808 		int i;
809 
810 		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
811 			((__be32 *)areq->result)[i] =
812 				cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4]));//K3
813 		areq->result[0] ^= 0x80;			// 10- padding
814 		crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
815 		return 0;
816 	} else if (unlikely(req->hmac &&
817 			    (req->len == req->block_sz) &&
818 			    !areq->nbytes)) {
819 		/*
820 		 * If we have an overall zero-length *HMAC* request:
821 		 * for HMAC we need to finalize the inner digest
822 		 * and then perform the outer hash.
823 		 */
824 
825 		/* generate pad block in the cache */
826 		/* start with a hash block of all zeroes */
827 		memset(req->cache, 0, req->block_sz);
828 		/* set the first byte to 0x80 to 'append a 1 bit' */
829 		req->cache[0] = 0x80;
830 		/* add the length in bits in the last 2 bytes */
831 		if (req->len_is_le) {
832 			/* Little endian length word (e.g. MD5) */
833 			req->cache[req->block_sz-8] = (req->block_sz << 3) &
834 						      255;
835 			req->cache[req->block_sz-7] = (req->block_sz >> 5);
836 		} else {
837 			/* Big endian length word (e.g. any SHA) */
838 			req->cache[req->block_sz-2] = (req->block_sz >> 5);
839 			req->cache[req->block_sz-1] = (req->block_sz << 3) &
840 						      255;
841 		}
842 
843 		req->len += req->block_sz; /* plus 1 hash block */
844 
845 		/* Set special zero-length HMAC flag */
846 		req->hmac_zlen = true;
847 
848 		/* Finalize HMAC */
849 		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
850 	} else if (req->hmac) {
851 		/* Finalize HMAC */
852 		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
853 	}
854 
855 	return safexcel_ahash_enqueue(areq);
856 }
857 
858 static int safexcel_ahash_finup(struct ahash_request *areq)
859 {
860 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
861 
862 	req->finish = true;
863 
864 	safexcel_ahash_update(areq);
865 	return safexcel_ahash_final(areq);
866 }
867 
868 static int safexcel_ahash_export(struct ahash_request *areq, void *out)
869 {
870 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
871 	struct safexcel_ahash_export_state *export = out;
872 
873 	export->len = req->len;
874 	export->processed = req->processed;
875 
876 	export->digest = req->digest;
877 
878 	memcpy(export->state, req->state, req->state_sz);
879 	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);
880 
881 	return 0;
882 }
883 
884 static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
885 {
886 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
887 	const struct safexcel_ahash_export_state *export = in;
888 	int ret;
889 
890 	ret = crypto_ahash_init(areq);
891 	if (ret)
892 		return ret;
893 
894 	req->len = export->len;
895 	req->processed = export->processed;
896 
897 	req->digest = export->digest;
898 
899 	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
900 	memcpy(req->state, export->state, req->state_sz);
901 
902 	return 0;
903 }
904 
905 static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
906 {
907 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
908 	struct safexcel_alg_template *tmpl =
909 		container_of(__crypto_ahash_alg(tfm->__crt_alg),
910 			     struct safexcel_alg_template, alg.ahash);
911 
912 	ctx->priv = tmpl->priv;
913 	ctx->base.send = safexcel_ahash_send;
914 	ctx->base.handle_result = safexcel_handle_result;
915 	ctx->fb_do_setkey = false;
916 
917 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
918 				 sizeof(struct safexcel_ahash_req));
919 	return 0;
920 }
921 
922 static int safexcel_sha1_init(struct ahash_request *areq)
923 {
924 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
925 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
926 
927 	memset(req, 0, sizeof(*req));
928 
929 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
930 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
931 	req->state_sz = SHA1_DIGEST_SIZE;
932 	req->digest_sz = SHA1_DIGEST_SIZE;
933 	req->block_sz = SHA1_BLOCK_SIZE;
934 
935 	return 0;
936 }
937 
938 static int safexcel_sha1_digest(struct ahash_request *areq)
939 {
940 	int ret = safexcel_sha1_init(areq);
941 
942 	if (ret)
943 		return ret;
944 
945 	return safexcel_ahash_finup(areq);
946 }
947 
948 static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
949 {
950 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
951 	struct safexcel_crypto_priv *priv = ctx->priv;
952 	int ret;
953 
954 	/* context not allocated, skip invalidation */
955 	if (!ctx->base.ctxr)
956 		return;
957 
958 	if (priv->flags & EIP197_TRC_CACHE) {
959 		ret = safexcel_ahash_exit_inv(tfm);
960 		if (ret)
961 			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
962 	} else {
963 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
964 			      ctx->base.ctxr_dma);
965 	}
966 }
967 
968 struct safexcel_alg_template safexcel_alg_sha1 = {
969 	.type = SAFEXCEL_ALG_TYPE_AHASH,
970 	.algo_mask = SAFEXCEL_ALG_SHA1,
971 	.alg.ahash = {
972 		.init = safexcel_sha1_init,
973 		.update = safexcel_ahash_update,
974 		.final = safexcel_ahash_final,
975 		.finup = safexcel_ahash_finup,
976 		.digest = safexcel_sha1_digest,
977 		.export = safexcel_ahash_export,
978 		.import = safexcel_ahash_import,
979 		.halg = {
980 			.digestsize = SHA1_DIGEST_SIZE,
981 			.statesize = sizeof(struct safexcel_ahash_export_state),
982 			.base = {
983 				.cra_name = "sha1",
984 				.cra_driver_name = "safexcel-sha1",
985 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
986 				.cra_flags = CRYPTO_ALG_ASYNC |
987 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
988 				.cra_blocksize = SHA1_BLOCK_SIZE,
989 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
990 				.cra_init = safexcel_ahash_cra_init,
991 				.cra_exit = safexcel_ahash_cra_exit,
992 				.cra_module = THIS_MODULE,
993 			},
994 		},
995 	},
996 };
997 
998 static int safexcel_hmac_sha1_init(struct ahash_request *areq)
999 {
1000 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1001 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1002 
1003 	memset(req, 0, sizeof(*req));
1004 
1005 	/* Start from ipad precompute */
1006 	memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
1007 	/* Already processed the key^ipad part now! */
1008 	req->len	= SHA1_BLOCK_SIZE;
1009 	req->processed	= SHA1_BLOCK_SIZE;
1010 
1011 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1012 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1013 	req->state_sz = SHA1_DIGEST_SIZE;
1014 	req->digest_sz = SHA1_DIGEST_SIZE;
1015 	req->block_sz = SHA1_BLOCK_SIZE;
1016 	req->hmac = true;
1017 
1018 	return 0;
1019 }
1020 
1021 static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
1022 {
1023 	int ret = safexcel_hmac_sha1_init(areq);
1024 
1025 	if (ret)
1026 		return ret;
1027 
1028 	return safexcel_ahash_finup(areq);
1029 }
1030 
1031 struct safexcel_ahash_result {
1032 	struct completion completion;
1033 	int error;
1034 };
1035 
1036 static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
1037 {
1038 	struct safexcel_ahash_result *result = req->data;
1039 
1040 	if (error == -EINPROGRESS)
1041 		return;
1042 
1043 	result->error = error;
1044 	complete(&result->completion);
1045 }
1046 
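/*
 * Derive the HMAC ipad/opad blocks from the key as per RFC 2104: hash
 * keys longer than a block first, zero-pad to the block size, then XOR
 * with the 0x36/0x5c constants.
 */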
1047 static int safexcel_hmac_init_pad(struct ahash_request *areq,
1048 				  unsigned int blocksize, const u8 *key,
1049 				  unsigned int keylen, u8 *ipad, u8 *opad)
1050 {
1051 	struct safexcel_ahash_result result;
1052 	struct scatterlist sg;
1053 	int ret, i;
1054 	u8 *keydup;
1055 
1056 	if (keylen <= blocksize) {
1057 		memcpy(ipad, key, keylen);
1058 	} else {
1059 		keydup = kmemdup(key, keylen, GFP_KERNEL);
1060 		if (!keydup)
1061 			return -ENOMEM;
1062 
1063 		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1064 					   safexcel_ahash_complete, &result);
1065 		sg_init_one(&sg, keydup, keylen);
1066 		ahash_request_set_crypt(areq, &sg, ipad, keylen);
1067 		init_completion(&result.completion);
1068 
1069 		ret = crypto_ahash_digest(areq);
1070 		if (ret == -EINPROGRESS || ret == -EBUSY) {
1071 			wait_for_completion_interruptible(&result.completion);
1072 			ret = result.error;
1073 		}
1074 
1075 		/* Avoid leaking */
1076 		memzero_explicit(keydup, keylen);
1077 		kfree(keydup);
1078 
1079 		if (ret)
1080 			return ret;
1081 
1082 		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
1083 	}
1084 
1085 	memset(ipad + keylen, 0, blocksize - keylen);
1086 	memcpy(opad, ipad, blocksize);
1087 
1088 	for (i = 0; i < blocksize; i++) {
1089 		ipad[i] ^= HMAC_IPAD_VALUE;
1090 		opad[i] ^= HMAC_OPAD_VALUE;
1091 	}
1092 
1093 	return 0;
1094 }
1095 
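/*
 * Hash one block of key^ipad (or key^opad) and export the intermediate
 * digest; this becomes the precomputed inner (or outer) HMAC state.
 */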
1096 static int safexcel_hmac_init_iv(struct ahash_request *areq,
1097 				 unsigned int blocksize, u8 *pad, void *state)
1098 {
1099 	struct safexcel_ahash_result result;
1100 	struct safexcel_ahash_req *req;
1101 	struct scatterlist sg;
1102 	int ret;
1103 
1104 	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1105 				   safexcel_ahash_complete, &result);
1106 	sg_init_one(&sg, pad, blocksize);
1107 	ahash_request_set_crypt(areq, &sg, pad, blocksize);
1108 	init_completion(&result.completion);
1109 
1110 	ret = crypto_ahash_init(areq);
1111 	if (ret)
1112 		return ret;
1113 
1114 	req = ahash_request_ctx(areq);
1115 	req->hmac = true;
1116 	req->last_req = true;
1117 
1118 	ret = crypto_ahash_update(areq);
1119 	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1120 		return ret;
1121 
1122 	wait_for_completion_interruptible(&result.completion);
1123 	if (result.error)
1124 		return result.error;
1125 
1126 	return crypto_ahash_export(areq, state);
1127 }
1128 
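/*
 * Precompute the inner and outer HMAC digests for the given key using the
 * hash algorithm named by 'alg', storing them as export states in
 * 'istate' and 'ostate'.
 */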
1129 int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
1130 			 void *istate, void *ostate)
1131 {
1132 	struct ahash_request *areq;
1133 	struct crypto_ahash *tfm;
1134 	unsigned int blocksize;
1135 	u8 *ipad, *opad;
1136 	int ret;
1137 
1138 	tfm = crypto_alloc_ahash(alg, 0, 0);
1139 	if (IS_ERR(tfm))
1140 		return PTR_ERR(tfm);
1141 
1142 	areq = ahash_request_alloc(tfm, GFP_KERNEL);
1143 	if (!areq) {
1144 		ret = -ENOMEM;
1145 		goto free_ahash;
1146 	}
1147 
1148 	crypto_ahash_clear_flags(tfm, ~0);
1149 	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1150 
1151 	ipad = kcalloc(2, blocksize, GFP_KERNEL);
1152 	if (!ipad) {
1153 		ret = -ENOMEM;
1154 		goto free_request;
1155 	}
1156 
1157 	opad = ipad + blocksize;
1158 
1159 	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
1160 	if (ret)
1161 		goto free_ipad;
1162 
1163 	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
1164 	if (ret)
1165 		goto free_ipad;
1166 
1167 	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);
1168 
1169 free_ipad:
1170 	kfree(ipad);
1171 free_request:
1172 	ahash_request_free(areq);
1173 free_ahash:
1174 	crypto_free_ahash(tfm);
1175 
1176 	return ret;
1177 }
1178 
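/*
 * Common HMAC setkey: precompute the inner/outer digests and flag the
 * cached engine context for invalidation if they changed.
 */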
1179 static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
1180 				    unsigned int keylen, const char *alg,
1181 				    unsigned int state_sz)
1182 {
1183 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1184 	struct safexcel_crypto_priv *priv = ctx->priv;
1185 	struct safexcel_ahash_export_state istate, ostate;
1186 	int ret;
1187 
1188 	ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
1189 	if (ret)
1190 		return ret;
1191 
1192 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr &&
1193 	    (memcmp(ctx->ipad, istate.state, state_sz) ||
1194 	     memcmp(ctx->opad, ostate.state, state_sz)))
1195 		ctx->base.needs_inv = true;
1196 
1197 	memcpy(ctx->ipad, &istate.state, state_sz);
1198 	memcpy(ctx->opad, &ostate.state, state_sz);
1199 
1200 	return 0;
1201 }
1202 
1203 static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
1204 				     unsigned int keylen)
1205 {
1206 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
1207 					SHA1_DIGEST_SIZE);
1208 }
1209 
1210 struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
1211 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1212 	.algo_mask = SAFEXCEL_ALG_SHA1,
1213 	.alg.ahash = {
1214 		.init = safexcel_hmac_sha1_init,
1215 		.update = safexcel_ahash_update,
1216 		.final = safexcel_ahash_final,
1217 		.finup = safexcel_ahash_finup,
1218 		.digest = safexcel_hmac_sha1_digest,
1219 		.setkey = safexcel_hmac_sha1_setkey,
1220 		.export = safexcel_ahash_export,
1221 		.import = safexcel_ahash_import,
1222 		.halg = {
1223 			.digestsize = SHA1_DIGEST_SIZE,
1224 			.statesize = sizeof(struct safexcel_ahash_export_state),
1225 			.base = {
1226 				.cra_name = "hmac(sha1)",
1227 				.cra_driver_name = "safexcel-hmac-sha1",
1228 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1229 				.cra_flags = CRYPTO_ALG_ASYNC |
1230 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1231 				.cra_blocksize = SHA1_BLOCK_SIZE,
1232 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1233 				.cra_init = safexcel_ahash_cra_init,
1234 				.cra_exit = safexcel_ahash_cra_exit,
1235 				.cra_module = THIS_MODULE,
1236 			},
1237 		},
1238 	},
1239 };
1240 
1241 static int safexcel_sha256_init(struct ahash_request *areq)
1242 {
1243 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1244 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1245 
1246 	memset(req, 0, sizeof(*req));
1247 
1248 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1249 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1250 	req->state_sz = SHA256_DIGEST_SIZE;
1251 	req->digest_sz = SHA256_DIGEST_SIZE;
1252 	req->block_sz = SHA256_BLOCK_SIZE;
1253 
1254 	return 0;
1255 }
1256 
1257 static int safexcel_sha256_digest(struct ahash_request *areq)
1258 {
1259 	int ret = safexcel_sha256_init(areq);
1260 
1261 	if (ret)
1262 		return ret;
1263 
1264 	return safexcel_ahash_finup(areq);
1265 }
1266 
1267 struct safexcel_alg_template safexcel_alg_sha256 = {
1268 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1269 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1270 	.alg.ahash = {
1271 		.init = safexcel_sha256_init,
1272 		.update = safexcel_ahash_update,
1273 		.final = safexcel_ahash_final,
1274 		.finup = safexcel_ahash_finup,
1275 		.digest = safexcel_sha256_digest,
1276 		.export = safexcel_ahash_export,
1277 		.import = safexcel_ahash_import,
1278 		.halg = {
1279 			.digestsize = SHA256_DIGEST_SIZE,
1280 			.statesize = sizeof(struct safexcel_ahash_export_state),
1281 			.base = {
1282 				.cra_name = "sha256",
1283 				.cra_driver_name = "safexcel-sha256",
1284 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1285 				.cra_flags = CRYPTO_ALG_ASYNC |
1286 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1287 				.cra_blocksize = SHA256_BLOCK_SIZE,
1288 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1289 				.cra_init = safexcel_ahash_cra_init,
1290 				.cra_exit = safexcel_ahash_cra_exit,
1291 				.cra_module = THIS_MODULE,
1292 			},
1293 		},
1294 	},
1295 };
1296 
1297 static int safexcel_sha224_init(struct ahash_request *areq)
1298 {
1299 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1300 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1301 
1302 	memset(req, 0, sizeof(*req));
1303 
1304 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1305 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1306 	req->state_sz = SHA256_DIGEST_SIZE;
1307 	req->digest_sz = SHA256_DIGEST_SIZE;
1308 	req->block_sz = SHA256_BLOCK_SIZE;
1309 
1310 	return 0;
1311 }
1312 
1313 static int safexcel_sha224_digest(struct ahash_request *areq)
1314 {
1315 	int ret = safexcel_sha224_init(areq);
1316 
1317 	if (ret)
1318 		return ret;
1319 
1320 	return safexcel_ahash_finup(areq);
1321 }
1322 
1323 struct safexcel_alg_template safexcel_alg_sha224 = {
1324 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1325 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1326 	.alg.ahash = {
1327 		.init = safexcel_sha224_init,
1328 		.update = safexcel_ahash_update,
1329 		.final = safexcel_ahash_final,
1330 		.finup = safexcel_ahash_finup,
1331 		.digest = safexcel_sha224_digest,
1332 		.export = safexcel_ahash_export,
1333 		.import = safexcel_ahash_import,
1334 		.halg = {
1335 			.digestsize = SHA224_DIGEST_SIZE,
1336 			.statesize = sizeof(struct safexcel_ahash_export_state),
1337 			.base = {
1338 				.cra_name = "sha224",
1339 				.cra_driver_name = "safexcel-sha224",
1340 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1341 				.cra_flags = CRYPTO_ALG_ASYNC |
1342 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1343 				.cra_blocksize = SHA224_BLOCK_SIZE,
1344 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1345 				.cra_init = safexcel_ahash_cra_init,
1346 				.cra_exit = safexcel_ahash_cra_exit,
1347 				.cra_module = THIS_MODULE,
1348 			},
1349 		},
1350 	},
1351 };
1352 
1353 static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
1354 				       unsigned int keylen)
1355 {
1356 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
1357 					SHA256_DIGEST_SIZE);
1358 }
1359 
1360 static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1361 {
1362 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1363 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1364 
1365 	memset(req, 0, sizeof(*req));
1366 
1367 	/* Start from ipad precompute */
1368 	memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
1369 	/* Already processed the key^ipad part now! */
1370 	req->len	= SHA256_BLOCK_SIZE;
1371 	req->processed	= SHA256_BLOCK_SIZE;
1372 
1373 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1374 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1375 	req->state_sz = SHA256_DIGEST_SIZE;
1376 	req->digest_sz = SHA256_DIGEST_SIZE;
1377 	req->block_sz = SHA256_BLOCK_SIZE;
1378 	req->hmac = true;
1379 
1380 	return 0;
1381 }
1382 
1383 static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
1384 {
1385 	int ret = safexcel_hmac_sha224_init(areq);
1386 
1387 	if (ret)
1388 		return ret;
1389 
1390 	return safexcel_ahash_finup(areq);
1391 }
1392 
1393 struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
1394 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1395 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1396 	.alg.ahash = {
1397 		.init = safexcel_hmac_sha224_init,
1398 		.update = safexcel_ahash_update,
1399 		.final = safexcel_ahash_final,
1400 		.finup = safexcel_ahash_finup,
1401 		.digest = safexcel_hmac_sha224_digest,
1402 		.setkey = safexcel_hmac_sha224_setkey,
1403 		.export = safexcel_ahash_export,
1404 		.import = safexcel_ahash_import,
1405 		.halg = {
1406 			.digestsize = SHA224_DIGEST_SIZE,
1407 			.statesize = sizeof(struct safexcel_ahash_export_state),
1408 			.base = {
1409 				.cra_name = "hmac(sha224)",
1410 				.cra_driver_name = "safexcel-hmac-sha224",
1411 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1412 				.cra_flags = CRYPTO_ALG_ASYNC |
1413 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1414 				.cra_blocksize = SHA224_BLOCK_SIZE,
1415 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1416 				.cra_init = safexcel_ahash_cra_init,
1417 				.cra_exit = safexcel_ahash_cra_exit,
1418 				.cra_module = THIS_MODULE,
1419 			},
1420 		},
1421 	},
1422 };
1423 
1424 static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
1425 				     unsigned int keylen)
1426 {
1427 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
1428 					SHA256_DIGEST_SIZE);
1429 }
1430 
1431 static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1432 {
1433 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1434 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1435 
1436 	memset(req, 0, sizeof(*req));
1437 
1438 	/* Start from ipad precompute */
1439 	memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
1440 	/* Already processed the key^ipad part now! */
1441 	req->len	= SHA256_BLOCK_SIZE;
1442 	req->processed	= SHA256_BLOCK_SIZE;
1443 
1444 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1445 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1446 	req->state_sz = SHA256_DIGEST_SIZE;
1447 	req->digest_sz = SHA256_DIGEST_SIZE;
1448 	req->block_sz = SHA256_BLOCK_SIZE;
1449 	req->hmac = true;
1450 
1451 	return 0;
1452 }
1453 
1454 static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1455 {
1456 	int ret = safexcel_hmac_sha256_init(areq);
1457 
1458 	if (ret)
1459 		return ret;
1460 
1461 	return safexcel_ahash_finup(areq);
1462 }
1463 
1464 struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1465 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1466 	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1467 	.alg.ahash = {
1468 		.init = safexcel_hmac_sha256_init,
1469 		.update = safexcel_ahash_update,
1470 		.final = safexcel_ahash_final,
1471 		.finup = safexcel_ahash_finup,
1472 		.digest = safexcel_hmac_sha256_digest,
1473 		.setkey = safexcel_hmac_sha256_setkey,
1474 		.export = safexcel_ahash_export,
1475 		.import = safexcel_ahash_import,
1476 		.halg = {
1477 			.digestsize = SHA256_DIGEST_SIZE,
1478 			.statesize = sizeof(struct safexcel_ahash_export_state),
1479 			.base = {
1480 				.cra_name = "hmac(sha256)",
1481 				.cra_driver_name = "safexcel-hmac-sha256",
1482 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1483 				.cra_flags = CRYPTO_ALG_ASYNC |
1484 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1485 				.cra_blocksize = SHA256_BLOCK_SIZE,
1486 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1487 				.cra_init = safexcel_ahash_cra_init,
1488 				.cra_exit = safexcel_ahash_cra_exit,
1489 				.cra_module = THIS_MODULE,
1490 			},
1491 		},
1492 	},
1493 };
1494 
1495 static int safexcel_sha512_init(struct ahash_request *areq)
1496 {
1497 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1498 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1499 
1500 	memset(req, 0, sizeof(*req));
1501 
1502 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1503 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1504 	req->state_sz = SHA512_DIGEST_SIZE;
1505 	req->digest_sz = SHA512_DIGEST_SIZE;
1506 	req->block_sz = SHA512_BLOCK_SIZE;
1507 
1508 	return 0;
1509 }
1510 
1511 static int safexcel_sha512_digest(struct ahash_request *areq)
1512 {
1513 	int ret = safexcel_sha512_init(areq);
1514 
1515 	if (ret)
1516 		return ret;
1517 
1518 	return safexcel_ahash_finup(areq);
1519 }
1520 
1521 struct safexcel_alg_template safexcel_alg_sha512 = {
1522 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1523 	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1524 	.alg.ahash = {
1525 		.init = safexcel_sha512_init,
1526 		.update = safexcel_ahash_update,
1527 		.final = safexcel_ahash_final,
1528 		.finup = safexcel_ahash_finup,
1529 		.digest = safexcel_sha512_digest,
1530 		.export = safexcel_ahash_export,
1531 		.import = safexcel_ahash_import,
1532 		.halg = {
1533 			.digestsize = SHA512_DIGEST_SIZE,
1534 			.statesize = sizeof(struct safexcel_ahash_export_state),
1535 			.base = {
1536 				.cra_name = "sha512",
1537 				.cra_driver_name = "safexcel-sha512",
1538 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1539 				.cra_flags = CRYPTO_ALG_ASYNC |
1540 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1541 				.cra_blocksize = SHA512_BLOCK_SIZE,
1542 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1543 				.cra_init = safexcel_ahash_cra_init,
1544 				.cra_exit = safexcel_ahash_cra_exit,
1545 				.cra_module = THIS_MODULE,
1546 			},
1547 		},
1548 	},
1549 };
1550 
1551 static int safexcel_sha384_init(struct ahash_request *areq)
1552 {
1553 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1554 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1555 
1556 	memset(req, 0, sizeof(*req));
1557 
1558 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1559 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1560 	req->state_sz = SHA512_DIGEST_SIZE;
1561 	req->digest_sz = SHA512_DIGEST_SIZE;
1562 	req->block_sz = SHA512_BLOCK_SIZE;
1563 
1564 	return 0;
1565 }
1566 
1567 static int safexcel_sha384_digest(struct ahash_request *areq)
1568 {
1569 	int ret = safexcel_sha384_init(areq);
1570 
1571 	if (ret)
1572 		return ret;
1573 
1574 	return safexcel_ahash_finup(areq);
1575 }
1576 
1577 struct safexcel_alg_template safexcel_alg_sha384 = {
1578 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1579 	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1580 	.alg.ahash = {
1581 		.init = safexcel_sha384_init,
1582 		.update = safexcel_ahash_update,
1583 		.final = safexcel_ahash_final,
1584 		.finup = safexcel_ahash_finup,
1585 		.digest = safexcel_sha384_digest,
1586 		.export = safexcel_ahash_export,
1587 		.import = safexcel_ahash_import,
1588 		.halg = {
1589 			.digestsize = SHA384_DIGEST_SIZE,
1590 			.statesize = sizeof(struct safexcel_ahash_export_state),
1591 			.base = {
1592 				.cra_name = "sha384",
1593 				.cra_driver_name = "safexcel-sha384",
1594 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1595 				.cra_flags = CRYPTO_ALG_ASYNC |
1596 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1597 				.cra_blocksize = SHA384_BLOCK_SIZE,
1598 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1599 				.cra_init = safexcel_ahash_cra_init,
1600 				.cra_exit = safexcel_ahash_cra_exit,
1601 				.cra_module = THIS_MODULE,
1602 			},
1603 		},
1604 	},
1605 };
1606 
1607 static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
1608 				       unsigned int keylen)
1609 {
1610 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1611 					SHA512_DIGEST_SIZE);
1612 }
1613 
1614 static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1615 {
1616 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1617 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1618 
1619 	memset(req, 0, sizeof(*req));
1620 
1621 	/* Start from ipad precompute */
1622 	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
1623 	/* Already processed the key^ipad part now! */
1624 	req->len	= SHA512_BLOCK_SIZE;
1625 	req->processed	= SHA512_BLOCK_SIZE;
1626 
1627 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1628 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1629 	req->state_sz = SHA512_DIGEST_SIZE;
1630 	req->digest_sz = SHA512_DIGEST_SIZE;
1631 	req->block_sz = SHA512_BLOCK_SIZE;
1632 	req->hmac = true;
1633 
1634 	return 0;
1635 }
1636 
1637 static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1638 {
1639 	int ret = safexcel_hmac_sha512_init(areq);
1640 
1641 	if (ret)
1642 		return ret;
1643 
1644 	return safexcel_ahash_finup(areq);
1645 }
1646 
1647 struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1648 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1649 	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1650 	.alg.ahash = {
1651 		.init = safexcel_hmac_sha512_init,
1652 		.update = safexcel_ahash_update,
1653 		.final = safexcel_ahash_final,
1654 		.finup = safexcel_ahash_finup,
1655 		.digest = safexcel_hmac_sha512_digest,
1656 		.setkey = safexcel_hmac_sha512_setkey,
1657 		.export = safexcel_ahash_export,
1658 		.import = safexcel_ahash_import,
1659 		.halg = {
1660 			.digestsize = SHA512_DIGEST_SIZE,
1661 			.statesize = sizeof(struct safexcel_ahash_export_state),
1662 			.base = {
1663 				.cra_name = "hmac(sha512)",
1664 				.cra_driver_name = "safexcel-hmac-sha512",
1665 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1666 				.cra_flags = CRYPTO_ALG_ASYNC |
1667 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1668 				.cra_blocksize = SHA512_BLOCK_SIZE,
1669 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1670 				.cra_init = safexcel_ahash_cra_init,
1671 				.cra_exit = safexcel_ahash_cra_exit,
1672 				.cra_module = THIS_MODULE,
1673 			},
1674 		},
1675 	},
1676 };
1677 
1678 static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1679 				       unsigned int keylen)
1680 {
1681 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1682 					SHA512_DIGEST_SIZE);
1683 }
1684 
1685 static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1686 {
1687 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1688 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1689 
1690 	memset(req, 0, sizeof(*req));
1691 
1692 	/* Start from ipad precompute */
1693 	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
1694 	/* Already processed the key^ipad part now! */
1695 	req->len	= SHA512_BLOCK_SIZE;
1696 	req->processed	= SHA512_BLOCK_SIZE;
1697 
1698 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1699 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1700 	req->state_sz = SHA512_DIGEST_SIZE;
1701 	req->digest_sz = SHA512_DIGEST_SIZE;
1702 	req->block_sz = SHA512_BLOCK_SIZE;
1703 	req->hmac = true;
1704 
1705 	return 0;
1706 }
1707 
1708 static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1709 {
1710 	int ret = safexcel_hmac_sha384_init(areq);
1711 
1712 	if (ret)
1713 		return ret;
1714 
1715 	return safexcel_ahash_finup(areq);
1716 }
1717 
1718 struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1719 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1720 	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1721 	.alg.ahash = {
1722 		.init = safexcel_hmac_sha384_init,
1723 		.update = safexcel_ahash_update,
1724 		.final = safexcel_ahash_final,
1725 		.finup = safexcel_ahash_finup,
1726 		.digest = safexcel_hmac_sha384_digest,
1727 		.setkey = safexcel_hmac_sha384_setkey,
1728 		.export = safexcel_ahash_export,
1729 		.import = safexcel_ahash_import,
1730 		.halg = {
1731 			.digestsize = SHA384_DIGEST_SIZE,
1732 			.statesize = sizeof(struct safexcel_ahash_export_state),
1733 			.base = {
1734 				.cra_name = "hmac(sha384)",
1735 				.cra_driver_name = "safexcel-hmac-sha384",
1736 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1737 				.cra_flags = CRYPTO_ALG_ASYNC |
1738 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1739 				.cra_blocksize = SHA384_BLOCK_SIZE,
1740 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1741 				.cra_init = safexcel_ahash_cra_init,
1742 				.cra_exit = safexcel_ahash_cra_exit,
1743 				.cra_module = THIS_MODULE,
1744 			},
1745 		},
1746 	},
1747 };
1748 
1749 static int safexcel_md5_init(struct ahash_request *areq)
1750 {
1751 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1752 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1753 
1754 	memset(req, 0, sizeof(*req));
1755 
1756 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1757 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1758 	req->state_sz = MD5_DIGEST_SIZE;
1759 	req->digest_sz = MD5_DIGEST_SIZE;
1760 	req->block_sz = MD5_HMAC_BLOCK_SIZE;
1761 
1762 	return 0;
1763 }
1764 
1765 static int safexcel_md5_digest(struct ahash_request *areq)
1766 {
1767 	int ret = safexcel_md5_init(areq);
1768 
1769 	if (ret)
1770 		return ret;
1771 
1772 	return safexcel_ahash_finup(areq);
1773 }
1774 
1775 struct safexcel_alg_template safexcel_alg_md5 = {
1776 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1777 	.algo_mask = SAFEXCEL_ALG_MD5,
1778 	.alg.ahash = {
1779 		.init = safexcel_md5_init,
1780 		.update = safexcel_ahash_update,
1781 		.final = safexcel_ahash_final,
1782 		.finup = safexcel_ahash_finup,
1783 		.digest = safexcel_md5_digest,
1784 		.export = safexcel_ahash_export,
1785 		.import = safexcel_ahash_import,
1786 		.halg = {
1787 			.digestsize = MD5_DIGEST_SIZE,
1788 			.statesize = sizeof(struct safexcel_ahash_export_state),
1789 			.base = {
1790 				.cra_name = "md5",
1791 				.cra_driver_name = "safexcel-md5",
1792 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1793 				.cra_flags = CRYPTO_ALG_ASYNC |
1794 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1795 				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1796 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1797 				.cra_init = safexcel_ahash_cra_init,
1798 				.cra_exit = safexcel_ahash_cra_exit,
1799 				.cra_module = THIS_MODULE,
1800 			},
1801 		},
1802 	},
1803 };
1804 
1805 static int safexcel_hmac_md5_init(struct ahash_request *areq)
1806 {
1807 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1808 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1809 
1810 	memset(req, 0, sizeof(*req));
1811 
1812 	/* Start from ipad precompute */
1813 	memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
1814 	/* Already processed the key^ipad part now! */
1815 	req->len	= MD5_HMAC_BLOCK_SIZE;
1816 	req->processed	= MD5_HMAC_BLOCK_SIZE;
1817 
1818 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1819 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1820 	req->state_sz = MD5_DIGEST_SIZE;
1821 	req->digest_sz = MD5_DIGEST_SIZE;
1822 	req->block_sz = MD5_HMAC_BLOCK_SIZE;
1823 	req->len_is_le = true; /* MD5 is little endian! ... */
1824 	req->hmac = true;
1825 
1826 	return 0;
1827 }
1828 
1829 static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1830 				     unsigned int keylen)
1831 {
1832 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1833 					MD5_DIGEST_SIZE);
1834 }
1835 
1836 static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1837 {
1838 	int ret = safexcel_hmac_md5_init(areq);
1839 
1840 	if (ret)
1841 		return ret;
1842 
1843 	return safexcel_ahash_finup(areq);
1844 }
1845 
1846 struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1847 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1848 	.algo_mask = SAFEXCEL_ALG_MD5,
1849 	.alg.ahash = {
1850 		.init = safexcel_hmac_md5_init,
1851 		.update = safexcel_ahash_update,
1852 		.final = safexcel_ahash_final,
1853 		.finup = safexcel_ahash_finup,
1854 		.digest = safexcel_hmac_md5_digest,
1855 		.setkey = safexcel_hmac_md5_setkey,
1856 		.export = safexcel_ahash_export,
1857 		.import = safexcel_ahash_import,
1858 		.halg = {
1859 			.digestsize = MD5_DIGEST_SIZE,
1860 			.statesize = sizeof(struct safexcel_ahash_export_state),
1861 			.base = {
1862 				.cra_name = "hmac(md5)",
1863 				.cra_driver_name = "safexcel-hmac-md5",
1864 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1865 				.cra_flags = CRYPTO_ALG_ASYNC |
1866 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1867 				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1868 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1869 				.cra_init = safexcel_ahash_cra_init,
1870 				.cra_exit = safexcel_ahash_cra_exit,
1871 				.cra_module = THIS_MODULE,
1872 			},
1873 		},
1874 	},
1875 };
1876 
1877 static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1878 {
1879 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1880 	int ret = safexcel_ahash_cra_init(tfm);
1881 
	/* Default 'key' (i.e. the CRC seed) is all zeroes */
1883 	memset(ctx->ipad, 0, sizeof(u32));
1884 	return ret;
1885 }
1886 
1887 static int safexcel_crc32_init(struct ahash_request *areq)
1888 {
1889 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1890 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1891 
1892 	memset(req, 0, sizeof(*req));
1893 
	/*
	 * Start from the loaded key: the running CRC state is kept bit
	 * inverted, so load the complement of the key (the CRC seed) here.
	 */
1895 	req->state[0]	= (__force __le32)le32_to_cpu(~ctx->ipad[0]);
1896 	/* Set processed to non-zero to enable invalidation detection */
1897 	req->len	= sizeof(u32);
1898 	req->processed	= sizeof(u32);
1899 
1900 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1901 	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1902 	req->state_sz = sizeof(u32);
1903 	req->digest_sz = sizeof(u32);
1904 	req->block_sz = sizeof(u32);
1905 
1906 	return 0;
1907 }
1908 
1909 static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1910 				 unsigned int keylen)
1911 {
1912 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1913 
1914 	if (keylen != sizeof(u32)) {
1915 		crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1916 		return -EINVAL;
1917 	}
1918 
1919 	memcpy(ctx->ipad, key, sizeof(u32));
1920 	return 0;
1921 }
1922 
1923 static int safexcel_crc32_digest(struct ahash_request *areq)
1924 {
1925 	return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1926 }
1927 
1928 struct safexcel_alg_template safexcel_alg_crc32 = {
1929 	.type = SAFEXCEL_ALG_TYPE_AHASH,
1930 	.algo_mask = 0,
1931 	.alg.ahash = {
1932 		.init = safexcel_crc32_init,
1933 		.update = safexcel_ahash_update,
1934 		.final = safexcel_ahash_final,
1935 		.finup = safexcel_ahash_finup,
1936 		.digest = safexcel_crc32_digest,
1937 		.setkey = safexcel_crc32_setkey,
1938 		.export = safexcel_ahash_export,
1939 		.import = safexcel_ahash_import,
1940 		.halg = {
1941 			.digestsize = sizeof(u32),
1942 			.statesize = sizeof(struct safexcel_ahash_export_state),
1943 			.base = {
1944 				.cra_name = "crc32",
1945 				.cra_driver_name = "safexcel-crc32",
1946 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1947 				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1948 					     CRYPTO_ALG_ASYNC |
1949 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1950 				.cra_blocksize = 1,
1951 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1952 				.cra_init = safexcel_crc32_cra_init,
1953 				.cra_exit = safexcel_ahash_cra_exit,
1954 				.cra_module = THIS_MODULE,
1955 			},
1956 		},
1957 	},
1958 };
1959 
1960 static int safexcel_cbcmac_init(struct ahash_request *areq)
1961 {
1962 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1963 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1964 
1965 	memset(req, 0, sizeof(*req));
1966 
1967 	/* Start from loaded keys */
1968 	memcpy(req->state, ctx->ipad, ctx->key_sz);
1969 	/* Set processed to non-zero to enable invalidation detection */
1970 	req->len	= AES_BLOCK_SIZE;
1971 	req->processed	= AES_BLOCK_SIZE;
1972 
1973 	req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
1974 	req->state_sz = ctx->key_sz;
1975 	req->digest_sz = AES_BLOCK_SIZE;
1976 	req->block_sz = AES_BLOCK_SIZE;
1977 	req->xcbcmac  = true;
1978 
1979 	return 0;
1980 }
1981 
1982 static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
1983 				 unsigned int len)
1984 {
1985 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1986 	struct crypto_aes_ctx aes;
1987 	int ret, i;
1988 
1989 	ret = aes_expandkey(&aes, key, len);
1990 	if (ret) {
1991 		crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1992 		return ret;
1993 	}
1994 
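	/*
	 * The context programmed into the engine consists of two 16 byte
	 * subkey blocks followed by the AES key itself. For plain CBC-MAC
	 * the subkey blocks stay all-zero, so only the AES key is filled
	 * in below, starting at 32-bit word offset 8.
	 */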
1995 	memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
1996 	for (i = 0; i < len / sizeof(u32); i++)
1997 		ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);
1998 
1999 	if (len == AES_KEYSIZE_192) {
2000 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2001 		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2002 	} else if (len == AES_KEYSIZE_256) {
2003 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2004 		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2005 	} else {
2006 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2007 		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2008 	}
2009 	ctx->cbcmac  = true;
2010 
2011 	memzero_explicit(&aes, sizeof(aes));
2012 	return 0;
2013 }
2014 
2015 static int safexcel_cbcmac_digest(struct ahash_request *areq)
2016 {
2017 	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2018 }
2019 
2020 struct safexcel_alg_template safexcel_alg_cbcmac = {
2021 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2022 	.algo_mask = 0,
2023 	.alg.ahash = {
2024 		.init = safexcel_cbcmac_init,
2025 		.update = safexcel_ahash_update,
2026 		.final = safexcel_ahash_final,
2027 		.finup = safexcel_ahash_finup,
2028 		.digest = safexcel_cbcmac_digest,
2029 		.setkey = safexcel_cbcmac_setkey,
2030 		.export = safexcel_ahash_export,
2031 		.import = safexcel_ahash_import,
2032 		.halg = {
2033 			.digestsize = AES_BLOCK_SIZE,
2034 			.statesize = sizeof(struct safexcel_ahash_export_state),
2035 			.base = {
2036 				.cra_name = "cbcmac(aes)",
2037 				.cra_driver_name = "safexcel-cbcmac-aes",
2038 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2039 				.cra_flags = CRYPTO_ALG_ASYNC |
2040 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2041 				.cra_blocksize = 1,
2042 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2043 				.cra_init = safexcel_ahash_cra_init,
2044 				.cra_exit = safexcel_ahash_cra_exit,
2045 				.cra_module = THIS_MODULE,
2046 			},
2047 		},
2048 	},
2049 };
2050 
2051 static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2052 				 unsigned int len)
2053 {
2054 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2055 	struct crypto_aes_ctx aes;
2056 	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2057 	int ret, i;
2058 
2059 	ret = aes_expandkey(&aes, key, len);
2060 	if (ret) {
2061 		crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2062 		return ret;
2063 	}
2064 
2065 	/* precompute the XCBC key material */
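	/*
	 * Per RFC 3566, K1, K2 and K3 are derived by encrypting the
	 * constants 0x01.., 0x02.. and 0x03.. with the user key. K2 and K3
	 * are stored first and K1 last in key_tmp, matching the K2|K3|K1
	 * layout used for the engine context; K1 is also loaded into the
	 * kaes software cipher below for later use by the driver.
	 */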
2066 	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2067 	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2068 				CRYPTO_TFM_REQ_MASK);
2069 	ret = crypto_cipher_setkey(ctx->kaes, key, len);
2070 	crypto_ahash_set_flags(tfm, crypto_cipher_get_flags(ctx->kaes) &
2071 			       CRYPTO_TFM_RES_MASK);
2072 	if (ret)
2073 		return ret;
2074 
2075 	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2076 		"\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2077 	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2078 		"\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2079 	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2080 		"\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2081 	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2082 		ctx->ipad[i] =
2083 			cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));
2084 
2085 	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2086 	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2087 				CRYPTO_TFM_REQ_MASK);
2088 	ret = crypto_cipher_setkey(ctx->kaes,
2089 				   (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2090 				   AES_MIN_KEY_SIZE);
2091 	crypto_ahash_set_flags(tfm, crypto_cipher_get_flags(ctx->kaes) &
2092 			       CRYPTO_TFM_RES_MASK);
2093 	if (ret)
2094 		return ret;
2095 
2096 	ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2097 	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2098 	ctx->cbcmac = false;
2099 
2100 	memzero_explicit(&aes, sizeof(aes));
2101 	return 0;
2102 }
2103 
2104 static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2105 {
2106 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2107 
2108 	safexcel_ahash_cra_init(tfm);
2109 	ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2110 	return PTR_ERR_OR_ZERO(ctx->kaes);
2111 }
2112 
2113 static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2114 {
2115 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2116 
2117 	crypto_free_cipher(ctx->kaes);
2118 	safexcel_ahash_cra_exit(tfm);
2119 }
2120 
2121 struct safexcel_alg_template safexcel_alg_xcbcmac = {
2122 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2123 	.algo_mask = 0,
2124 	.alg.ahash = {
2125 		.init = safexcel_cbcmac_init,
2126 		.update = safexcel_ahash_update,
2127 		.final = safexcel_ahash_final,
2128 		.finup = safexcel_ahash_finup,
2129 		.digest = safexcel_cbcmac_digest,
2130 		.setkey = safexcel_xcbcmac_setkey,
2131 		.export = safexcel_ahash_export,
2132 		.import = safexcel_ahash_import,
2133 		.halg = {
2134 			.digestsize = AES_BLOCK_SIZE,
2135 			.statesize = sizeof(struct safexcel_ahash_export_state),
2136 			.base = {
2137 				.cra_name = "xcbc(aes)",
2138 				.cra_driver_name = "safexcel-xcbc-aes",
2139 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2140 				.cra_flags = CRYPTO_ALG_ASYNC |
2141 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2142 				.cra_blocksize = AES_BLOCK_SIZE,
2143 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2144 				.cra_init = safexcel_xcbcmac_cra_init,
2145 				.cra_exit = safexcel_xcbcmac_cra_exit,
2146 				.cra_module = THIS_MODULE,
2147 			},
2148 		},
2149 	},
2150 };
2151 
2152 static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2153 				unsigned int len)
2154 {
2155 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2156 	struct crypto_aes_ctx aes;
2157 	__be64 consts[4];
2158 	u64 _const[2];
2159 	u8 msb_mask, gfmask;
2160 	int ret, i;
2161 
2162 	ret = aes_expandkey(&aes, key, len);
2163 	if (ret) {
2164 		crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2165 		return ret;
2166 	}
2167 
2168 	for (i = 0; i < len / sizeof(u32); i++)
2169 		ctx->ipad[i + 8] =
2170 			cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));
2171 
2172 	/* precompute the CMAC key material */
2173 	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2174 	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2175 				CRYPTO_TFM_REQ_MASK);
2176 	ret = crypto_cipher_setkey(ctx->kaes, key, len);
2177 	crypto_ahash_set_flags(tfm, crypto_cipher_get_flags(ctx->kaes) &
2178 			       CRYPTO_TFM_RES_MASK);
2179 	if (ret)
2180 		return ret;
2181 
2182 	/* code below borrowed from crypto/cmac.c */
2183 	/* encrypt the zero block */
2184 	memset(consts, 0, AES_BLOCK_SIZE);
2185 	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2186 
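	/*
	 * Derive the two CMAC subkeys by doubling the encrypted zero block
	 * in GF(2^128) (see NIST SP 800-38B): 0x87 is the reduction constant
	 * of the field polynomial, and _const[] holds the 128-bit value as
	 * two host-order 64-bit halves so the shift can be done in plain C.
	 */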
2187 	gfmask = 0x87;
2188 	_const[0] = be64_to_cpu(consts[1]);
2189 	_const[1] = be64_to_cpu(consts[0]);
2190 
2191 	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
2192 	for (i = 0; i < 4; i += 2) {
2193 		msb_mask = ((s64)_const[1] >> 63) & gfmask;
2194 		_const[1] = (_const[1] << 1) | (_const[0] >> 63);
2195 		_const[0] = (_const[0] << 1) ^ msb_mask;
2196 
2197 		consts[i + 0] = cpu_to_be64(_const[1]);
2198 		consts[i + 1] = cpu_to_be64(_const[0]);
2199 	}
2200 	/* end of code borrowed from crypto/cmac.c */
2201 
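	/*
	 * Store K1 and K2 in front of the AES key (already copied in at
	 * word offset 8 above), byte-swapped into the order the engine
	 * expects.
	 */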
2202 	for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2203 		ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);
2204 
2205 	if (len == AES_KEYSIZE_192) {
2206 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2207 		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2208 	} else if (len == AES_KEYSIZE_256) {
2209 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2210 		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2211 	} else {
2212 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2213 		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2214 	}
2215 	ctx->cbcmac = false;
2216 
2217 	memzero_explicit(&aes, sizeof(aes));
2218 	return 0;
2219 }
2220 
2221 struct safexcel_alg_template safexcel_alg_cmac = {
2222 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2223 	.algo_mask = 0,
2224 	.alg.ahash = {
2225 		.init = safexcel_cbcmac_init,
2226 		.update = safexcel_ahash_update,
2227 		.final = safexcel_ahash_final,
2228 		.finup = safexcel_ahash_finup,
2229 		.digest = safexcel_cbcmac_digest,
2230 		.setkey = safexcel_cmac_setkey,
2231 		.export = safexcel_ahash_export,
2232 		.import = safexcel_ahash_import,
2233 		.halg = {
2234 			.digestsize = AES_BLOCK_SIZE,
2235 			.statesize = sizeof(struct safexcel_ahash_export_state),
2236 			.base = {
2237 				.cra_name = "cmac(aes)",
2238 				.cra_driver_name = "safexcel-cmac-aes",
2239 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2240 				.cra_flags = CRYPTO_ALG_ASYNC |
2241 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2242 				.cra_blocksize = AES_BLOCK_SIZE,
2243 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2244 				.cra_init = safexcel_xcbcmac_cra_init,
2245 				.cra_exit = safexcel_xcbcmac_cra_exit,
2246 				.cra_module = THIS_MODULE,
2247 			},
2248 		},
2249 	},
2250 };
2251 
2252 static int safexcel_sm3_init(struct ahash_request *areq)
2253 {
2254 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2255 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2256 
2257 	memset(req, 0, sizeof(*req));
2258 
2259 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2260 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2261 	req->state_sz = SM3_DIGEST_SIZE;
2262 	req->digest_sz = SM3_DIGEST_SIZE;
2263 	req->block_sz = SM3_BLOCK_SIZE;
2264 
2265 	return 0;
2266 }
2267 
2268 static int safexcel_sm3_digest(struct ahash_request *areq)
2269 {
2270 	int ret = safexcel_sm3_init(areq);
2271 
2272 	if (ret)
2273 		return ret;
2274 
2275 	return safexcel_ahash_finup(areq);
2276 }
2277 
2278 struct safexcel_alg_template safexcel_alg_sm3 = {
2279 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2280 	.algo_mask = SAFEXCEL_ALG_SM3,
2281 	.alg.ahash = {
2282 		.init = safexcel_sm3_init,
2283 		.update = safexcel_ahash_update,
2284 		.final = safexcel_ahash_final,
2285 		.finup = safexcel_ahash_finup,
2286 		.digest = safexcel_sm3_digest,
2287 		.export = safexcel_ahash_export,
2288 		.import = safexcel_ahash_import,
2289 		.halg = {
2290 			.digestsize = SM3_DIGEST_SIZE,
2291 			.statesize = sizeof(struct safexcel_ahash_export_state),
2292 			.base = {
2293 				.cra_name = "sm3",
2294 				.cra_driver_name = "safexcel-sm3",
2295 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2296 				.cra_flags = CRYPTO_ALG_ASYNC |
2297 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2298 				.cra_blocksize = SM3_BLOCK_SIZE,
2299 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2300 				.cra_init = safexcel_ahash_cra_init,
2301 				.cra_exit = safexcel_ahash_cra_exit,
2302 				.cra_module = THIS_MODULE,
2303 			},
2304 		},
2305 	},
2306 };
2307 
2308 static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2309 				    unsigned int keylen)
2310 {
2311 	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2312 					SM3_DIGEST_SIZE);
2313 }
2314 
2315 static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2316 {
2317 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2318 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2319 
2320 	memset(req, 0, sizeof(*req));
2321 
2322 	/* Start from ipad precompute */
2323 	memcpy(req->state, ctx->ipad, SM3_DIGEST_SIZE);
2324 	/* Already processed the key^ipad part now! */
2325 	req->len	= SM3_BLOCK_SIZE;
2326 	req->processed	= SM3_BLOCK_SIZE;
2327 
2328 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2329 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2330 	req->state_sz = SM3_DIGEST_SIZE;
2331 	req->digest_sz = SM3_DIGEST_SIZE;
2332 	req->block_sz = SM3_BLOCK_SIZE;
2333 	req->hmac = true;
2334 
2335 	return 0;
2336 }
2337 
2338 static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2339 {
2340 	int ret = safexcel_hmac_sm3_init(areq);
2341 
2342 	if (ret)
2343 		return ret;
2344 
2345 	return safexcel_ahash_finup(areq);
2346 }
2347 
2348 struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2349 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2350 	.algo_mask = SAFEXCEL_ALG_SM3,
2351 	.alg.ahash = {
2352 		.init = safexcel_hmac_sm3_init,
2353 		.update = safexcel_ahash_update,
2354 		.final = safexcel_ahash_final,
2355 		.finup = safexcel_ahash_finup,
2356 		.digest = safexcel_hmac_sm3_digest,
2357 		.setkey = safexcel_hmac_sm3_setkey,
2358 		.export = safexcel_ahash_export,
2359 		.import = safexcel_ahash_import,
2360 		.halg = {
2361 			.digestsize = SM3_DIGEST_SIZE,
2362 			.statesize = sizeof(struct safexcel_ahash_export_state),
2363 			.base = {
2364 				.cra_name = "hmac(sm3)",
2365 				.cra_driver_name = "safexcel-hmac-sm3",
2366 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2367 				.cra_flags = CRYPTO_ALG_ASYNC |
2368 					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2369 				.cra_blocksize = SM3_BLOCK_SIZE,
2370 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2371 				.cra_init = safexcel_ahash_cra_init,
2372 				.cra_exit = safexcel_ahash_cra_exit,
2373 				.cra_module = THIS_MODULE,
2374 			},
2375 		},
2376 	},
2377 };
2378 
2379 static int safexcel_sha3_224_init(struct ahash_request *areq)
2380 {
2381 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2382 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2383 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2384 
2385 	memset(req, 0, sizeof(*req));
2386 
2387 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2388 	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2389 	req->state_sz = SHA3_224_DIGEST_SIZE;
2390 	req->digest_sz = SHA3_224_DIGEST_SIZE;
2391 	req->block_sz = SHA3_224_BLOCK_SIZE;
2392 	ctx->do_fallback = false;
2393 	ctx->fb_init_done = false;
2394 	return 0;
2395 }
2396 
2397 static int safexcel_sha3_fbcheck(struct ahash_request *req)
2398 {
2399 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2400 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2401 	struct ahash_request *subreq = ahash_request_ctx(req);
2402 	int ret = 0;
2403 
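	/*
	 * Hash updates, export/import and zero-length hashes cannot be
	 * handled by the engine, which only does complete SHA3 operations
	 * in one pass, so such requests are routed to the software
	 * fallback. This helper lazily sets up the fallback request.
	 */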
2404 	if (ctx->do_fallback) {
2405 		ahash_request_set_tfm(subreq, ctx->fback);
2406 		ahash_request_set_callback(subreq, req->base.flags,
2407 					   req->base.complete, req->base.data);
2408 		ahash_request_set_crypt(subreq, req->src, req->result,
2409 					req->nbytes);
2410 		if (!ctx->fb_init_done) {
2411 			if (ctx->fb_do_setkey) {
				/* Set the HMAC key on the fallback ahash */
2413 				u8 key[SHA3_224_BLOCK_SIZE];
2414 
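				/*
				 * safexcel_hmac_sha3_setkey() stored the key
				 * as two half-block pieces in ipad and opad;
				 * reassemble it for the fallback's own HMAC.
				 */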
2415 				memcpy(key, ctx->ipad,
2416 				       crypto_ahash_blocksize(ctx->fback) / 2);
2417 				memcpy(key +
2418 				       crypto_ahash_blocksize(ctx->fback) / 2,
2419 				       ctx->opad,
2420 				       crypto_ahash_blocksize(ctx->fback) / 2);
2421 				ret = crypto_ahash_setkey(ctx->fback, key,
2422 					crypto_ahash_blocksize(ctx->fback));
2423 				memzero_explicit(key,
2424 					crypto_ahash_blocksize(ctx->fback));
2425 				ctx->fb_do_setkey = false;
2426 			}
2427 			ret = ret ?: crypto_ahash_init(subreq);
2428 			ctx->fb_init_done = true;
2429 		}
2430 	}
2431 	return ret;
2432 }
2433 
2434 static int safexcel_sha3_update(struct ahash_request *req)
2435 {
2436 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2437 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2438 	struct ahash_request *subreq = ahash_request_ctx(req);
2439 
2440 	ctx->do_fallback = true;
2441 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2442 }
2443 
2444 static int safexcel_sha3_final(struct ahash_request *req)
2445 {
2446 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2447 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2448 	struct ahash_request *subreq = ahash_request_ctx(req);
2449 
2450 	ctx->do_fallback = true;
2451 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2452 }
2453 
2454 static int safexcel_sha3_finup(struct ahash_request *req)
2455 {
2456 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2457 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2458 	struct ahash_request *subreq = ahash_request_ctx(req);
2459 
2460 	ctx->do_fallback |= !req->nbytes;
2461 	if (ctx->do_fallback)
		/* Update or export/import happened, or len is 0: cannot use the HW */
2463 		return safexcel_sha3_fbcheck(req) ?:
2464 		       crypto_ahash_finup(subreq);
2465 	else
2466 		return safexcel_ahash_finup(req);
2467 }
2468 
2469 static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2470 {
2471 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2472 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2473 	struct ahash_request *subreq = ahash_request_ctx(req);
2474 
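	/* digest() starts a fresh hash, so force re-init of the fallback */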
2475 	ctx->do_fallback = true;
2476 	ctx->fb_init_done = false;
2477 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2478 }
2479 
2480 static int safexcel_sha3_224_digest(struct ahash_request *req)
2481 {
2482 	if (req->nbytes)
2483 		return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2484 
2485 	/* HW cannot do zero length hash, use fallback instead */
2486 	return safexcel_sha3_digest_fallback(req);
2487 }
2488 
2489 static int safexcel_sha3_export(struct ahash_request *req, void *out)
2490 {
2491 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2492 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2493 	struct ahash_request *subreq = ahash_request_ctx(req);
2494 
2495 	ctx->do_fallback = true;
2496 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2497 }
2498 
2499 static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2500 {
2501 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2502 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2503 	struct ahash_request *subreq = ahash_request_ctx(req);
2504 
2505 	ctx->do_fallback = true;
2506 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2507 	// return safexcel_ahash_import(req, in);
2508 }
2509 
2510 static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2511 {
2512 	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2513 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2514 
2515 	safexcel_ahash_cra_init(tfm);
2516 
2517 	/* Allocate fallback implementation */
2518 	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2519 					CRYPTO_ALG_ASYNC |
2520 					CRYPTO_ALG_NEED_FALLBACK);
2521 	if (IS_ERR(ctx->fback))
2522 		return PTR_ERR(ctx->fback);
2523 
	/* Use the fallback's statesize: export/import go through it */
2525 	crypto_hash_alg_common(ahash)->statesize =
2526 		crypto_ahash_statesize(ctx->fback);
2527 	crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
2528 					    sizeof(struct ahash_request) +
2529 					    crypto_ahash_reqsize(ctx->fback)));
2530 	return 0;
2531 }
2532 
2533 static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2534 {
2535 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2536 
2537 	crypto_free_ahash(ctx->fback);
2538 	safexcel_ahash_cra_exit(tfm);
2539 }
2540 
2541 struct safexcel_alg_template safexcel_alg_sha3_224 = {
2542 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2543 	.algo_mask = SAFEXCEL_ALG_SHA3,
2544 	.alg.ahash = {
2545 		.init = safexcel_sha3_224_init,
2546 		.update = safexcel_sha3_update,
2547 		.final = safexcel_sha3_final,
2548 		.finup = safexcel_sha3_finup,
2549 		.digest = safexcel_sha3_224_digest,
2550 		.export = safexcel_sha3_export,
2551 		.import = safexcel_sha3_import,
2552 		.halg = {
2553 			.digestsize = SHA3_224_DIGEST_SIZE,
2554 			.statesize = sizeof(struct safexcel_ahash_export_state),
2555 			.base = {
2556 				.cra_name = "sha3-224",
2557 				.cra_driver_name = "safexcel-sha3-224",
2558 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2559 				.cra_flags = CRYPTO_ALG_ASYNC |
2560 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2561 					     CRYPTO_ALG_NEED_FALLBACK,
2562 				.cra_blocksize = SHA3_224_BLOCK_SIZE,
2563 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2564 				.cra_init = safexcel_sha3_cra_init,
2565 				.cra_exit = safexcel_sha3_cra_exit,
2566 				.cra_module = THIS_MODULE,
2567 			},
2568 		},
2569 	},
2570 };
2571 
2572 static int safexcel_sha3_256_init(struct ahash_request *areq)
2573 {
2574 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2575 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2576 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2577 
2578 	memset(req, 0, sizeof(*req));
2579 
2580 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2581 	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2582 	req->state_sz = SHA3_256_DIGEST_SIZE;
2583 	req->digest_sz = SHA3_256_DIGEST_SIZE;
2584 	req->block_sz = SHA3_256_BLOCK_SIZE;
2585 	ctx->do_fallback = false;
2586 	ctx->fb_init_done = false;
2587 	return 0;
2588 }
2589 
2590 static int safexcel_sha3_256_digest(struct ahash_request *req)
2591 {
2592 	if (req->nbytes)
2593 		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2594 
2595 	/* HW cannot do zero length hash, use fallback instead */
2596 	return safexcel_sha3_digest_fallback(req);
2597 }
2598 
2599 struct safexcel_alg_template safexcel_alg_sha3_256 = {
2600 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2601 	.algo_mask = SAFEXCEL_ALG_SHA3,
2602 	.alg.ahash = {
2603 		.init = safexcel_sha3_256_init,
2604 		.update = safexcel_sha3_update,
2605 		.final = safexcel_sha3_final,
2606 		.finup = safexcel_sha3_finup,
2607 		.digest = safexcel_sha3_256_digest,
2608 		.export = safexcel_sha3_export,
2609 		.import = safexcel_sha3_import,
2610 		.halg = {
2611 			.digestsize = SHA3_256_DIGEST_SIZE,
2612 			.statesize = sizeof(struct safexcel_ahash_export_state),
2613 			.base = {
2614 				.cra_name = "sha3-256",
2615 				.cra_driver_name = "safexcel-sha3-256",
2616 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2617 				.cra_flags = CRYPTO_ALG_ASYNC |
2618 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2619 					     CRYPTO_ALG_NEED_FALLBACK,
2620 				.cra_blocksize = SHA3_256_BLOCK_SIZE,
2621 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2622 				.cra_init = safexcel_sha3_cra_init,
2623 				.cra_exit = safexcel_sha3_cra_exit,
2624 				.cra_module = THIS_MODULE,
2625 			},
2626 		},
2627 	},
2628 };
2629 
2630 static int safexcel_sha3_384_init(struct ahash_request *areq)
2631 {
2632 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2633 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2634 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2635 
2636 	memset(req, 0, sizeof(*req));
2637 
2638 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2639 	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2640 	req->state_sz = SHA3_384_DIGEST_SIZE;
2641 	req->digest_sz = SHA3_384_DIGEST_SIZE;
2642 	req->block_sz = SHA3_384_BLOCK_SIZE;
2643 	ctx->do_fallback = false;
2644 	ctx->fb_init_done = false;
2645 	return 0;
2646 }
2647 
2648 static int safexcel_sha3_384_digest(struct ahash_request *req)
2649 {
2650 	if (req->nbytes)
2651 		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2652 
2653 	/* HW cannot do zero length hash, use fallback instead */
2654 	return safexcel_sha3_digest_fallback(req);
2655 }
2656 
2657 struct safexcel_alg_template safexcel_alg_sha3_384 = {
2658 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2659 	.algo_mask = SAFEXCEL_ALG_SHA3,
2660 	.alg.ahash = {
2661 		.init = safexcel_sha3_384_init,
2662 		.update = safexcel_sha3_update,
2663 		.final = safexcel_sha3_final,
2664 		.finup = safexcel_sha3_finup,
2665 		.digest = safexcel_sha3_384_digest,
2666 		.export = safexcel_sha3_export,
2667 		.import = safexcel_sha3_import,
2668 		.halg = {
2669 			.digestsize = SHA3_384_DIGEST_SIZE,
2670 			.statesize = sizeof(struct safexcel_ahash_export_state),
2671 			.base = {
2672 				.cra_name = "sha3-384",
2673 				.cra_driver_name = "safexcel-sha3-384",
2674 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2675 				.cra_flags = CRYPTO_ALG_ASYNC |
2676 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2677 					     CRYPTO_ALG_NEED_FALLBACK,
2678 				.cra_blocksize = SHA3_384_BLOCK_SIZE,
2679 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2680 				.cra_init = safexcel_sha3_cra_init,
2681 				.cra_exit = safexcel_sha3_cra_exit,
2682 				.cra_module = THIS_MODULE,
2683 			},
2684 		},
2685 	},
2686 };
2687 
2688 static int safexcel_sha3_512_init(struct ahash_request *areq)
2689 {
2690 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2691 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2692 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2693 
2694 	memset(req, 0, sizeof(*req));
2695 
2696 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2697 	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2698 	req->state_sz = SHA3_512_DIGEST_SIZE;
2699 	req->digest_sz = SHA3_512_DIGEST_SIZE;
2700 	req->block_sz = SHA3_512_BLOCK_SIZE;
2701 	ctx->do_fallback = false;
2702 	ctx->fb_init_done = false;
2703 	return 0;
2704 }
2705 
2706 static int safexcel_sha3_512_digest(struct ahash_request *req)
2707 {
2708 	if (req->nbytes)
2709 		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2710 
2711 	/* HW cannot do zero length hash, use fallback instead */
2712 	return safexcel_sha3_digest_fallback(req);
2713 }
2714 
2715 struct safexcel_alg_template safexcel_alg_sha3_512 = {
2716 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2717 	.algo_mask = SAFEXCEL_ALG_SHA3,
2718 	.alg.ahash = {
2719 		.init = safexcel_sha3_512_init,
2720 		.update = safexcel_sha3_update,
2721 		.final = safexcel_sha3_final,
2722 		.finup = safexcel_sha3_finup,
2723 		.digest = safexcel_sha3_512_digest,
2724 		.export = safexcel_sha3_export,
2725 		.import = safexcel_sha3_import,
2726 		.halg = {
2727 			.digestsize = SHA3_512_DIGEST_SIZE,
2728 			.statesize = sizeof(struct safexcel_ahash_export_state),
2729 			.base = {
2730 				.cra_name = "sha3-512",
2731 				.cra_driver_name = "safexcel-sha3-512",
2732 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2733 				.cra_flags = CRYPTO_ALG_ASYNC |
2734 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2735 					     CRYPTO_ALG_NEED_FALLBACK,
2736 				.cra_blocksize = SHA3_512_BLOCK_SIZE,
2737 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2738 				.cra_init = safexcel_sha3_cra_init,
2739 				.cra_exit = safexcel_sha3_cra_exit,
2740 				.cra_module = THIS_MODULE,
2741 			},
2742 		},
2743 	},
2744 };
2745 
2746 static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2747 {
2748 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2749 	int ret;
2750 
2751 	ret = safexcel_sha3_cra_init(tfm);
2752 	if (ret)
2753 		return ret;
2754 
2755 	/* Allocate precalc basic digest implementation */
2756 	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2757 	if (IS_ERR(ctx->shpre))
2758 		return PTR_ERR(ctx->shpre);
2759 
2760 	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2761 			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2762 	if (!ctx->shdesc) {
2763 		crypto_free_shash(ctx->shpre);
2764 		return -ENOMEM;
2765 	}
2766 	ctx->shdesc->tfm = ctx->shpre;
2767 	return 0;
2768 }
2769 
2770 static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
2771 {
2772 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2773 
2774 	crypto_free_ahash(ctx->fback);
2775 	crypto_free_shash(ctx->shpre);
2776 	kfree(ctx->shdesc);
2777 	safexcel_ahash_cra_exit(tfm);
2778 }
2779 
2780 static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
2781 				     unsigned int keylen)
2782 {
2783 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2784 	int ret = 0;
2785 
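	/*
	 * Unlike the other HMAC algorithms, the engine's HMAC-SHA3 takes the
	 * raw key rather than precomputed ipad/opad digests. The key is kept
	 * split across the ipad and opad buffers, half a block each, to fit
	 * the existing driver infrastructure.
	 */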
2786 	if (keylen > crypto_ahash_blocksize(tfm)) {
		/*
		 * If the key is larger than the blocksize, then hash it
		 * first using the precalc digest implementation (shpre)
		 */
2791 		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
2792 					  (u8 *)ctx->ipad);
2793 		keylen = crypto_shash_digestsize(ctx->shpre);
2794 
2795 		/*
2796 		 * If the digest is larger than half the blocksize, we need to
2797 		 * move the rest to opad due to the way our HMAC infra works.
2798 		 */
2799 		if (keylen > crypto_ahash_blocksize(tfm) / 2)
			/* Buffers overlap, need to use memmove instead of memcpy! */
2801 			memmove(ctx->opad,
2802 				(u8 *)ctx->ipad +
2803 					crypto_ahash_blocksize(tfm) / 2,
2804 				keylen - crypto_ahash_blocksize(tfm) / 2);
2805 	} else {
2806 		/*
2807 		 * Copy the key to our ipad & opad buffers
2808 		 * Note that ipad and opad each contain one half of the key,
2809 		 * to match the existing HMAC driver infrastructure.
2810 		 */
2811 		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2812 			memcpy(ctx->ipad, key, keylen);
2813 		} else {
2814 			memcpy(ctx->ipad, key,
2815 			       crypto_ahash_blocksize(tfm) / 2);
2816 			memcpy(ctx->opad,
2817 			       key + crypto_ahash_blocksize(tfm) / 2,
2818 			       keylen - crypto_ahash_blocksize(tfm) / 2);
2819 		}
2820 	}
2821 
2822 	/* Pad key with zeroes */
2823 	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2824 		memset((u8 *)ctx->ipad + keylen, 0,
2825 		       crypto_ahash_blocksize(tfm) / 2 - keylen);
2826 		memset(ctx->opad, 0, crypto_ahash_blocksize(tfm) / 2);
2827 	} else {
2828 		memset((u8 *)ctx->opad + keylen -
2829 		       crypto_ahash_blocksize(tfm) / 2, 0,
2830 		       crypto_ahash_blocksize(tfm) - keylen);
2831 	}
2832 
2833 	/* If doing fallback, still need to set the new key! */
2834 	ctx->fb_do_setkey = true;
2835 	return ret;
2836 }
2837 
2838 static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
2839 {
2840 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2841 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2842 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2843 
2844 	memset(req, 0, sizeof(*req));
2845 
2846 	/* Copy (half of) the key */
2847 	memcpy(req->state, ctx->ipad, SHA3_224_BLOCK_SIZE / 2);
2848 	/* Start of HMAC should have len == processed == blocksize */
2849 	req->len	= SHA3_224_BLOCK_SIZE;
2850 	req->processed	= SHA3_224_BLOCK_SIZE;
2851 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2852 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2853 	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
2854 	req->digest_sz = SHA3_224_DIGEST_SIZE;
2855 	req->block_sz = SHA3_224_BLOCK_SIZE;
2856 	req->hmac = true;
2857 	ctx->do_fallback = false;
2858 	ctx->fb_init_done = false;
2859 	return 0;
2860 }
2861 
2862 static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2863 {
2864 	if (req->nbytes)
2865 		return safexcel_hmac_sha3_224_init(req) ?:
2866 		       safexcel_ahash_finup(req);
2867 
2868 	/* HW cannot do zero length HMAC, use fallback instead */
2869 	return safexcel_sha3_digest_fallback(req);
2870 }
2871 
2872 static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
2873 {
2874 	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
2875 }
2876 
2877 struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
2878 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2879 	.algo_mask = SAFEXCEL_ALG_SHA3,
2880 	.alg.ahash = {
2881 		.init = safexcel_hmac_sha3_224_init,
2882 		.update = safexcel_sha3_update,
2883 		.final = safexcel_sha3_final,
2884 		.finup = safexcel_sha3_finup,
2885 		.digest = safexcel_hmac_sha3_224_digest,
2886 		.setkey = safexcel_hmac_sha3_setkey,
2887 		.export = safexcel_sha3_export,
2888 		.import = safexcel_sha3_import,
2889 		.halg = {
2890 			.digestsize = SHA3_224_DIGEST_SIZE,
2891 			.statesize = sizeof(struct safexcel_ahash_export_state),
2892 			.base = {
2893 				.cra_name = "hmac(sha3-224)",
2894 				.cra_driver_name = "safexcel-hmac-sha3-224",
2895 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2896 				.cra_flags = CRYPTO_ALG_ASYNC |
2897 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2898 					     CRYPTO_ALG_NEED_FALLBACK,
2899 				.cra_blocksize = SHA3_224_BLOCK_SIZE,
2900 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2901 				.cra_init = safexcel_hmac_sha3_224_cra_init,
2902 				.cra_exit = safexcel_hmac_sha3_cra_exit,
2903 				.cra_module = THIS_MODULE,
2904 			},
2905 		},
2906 	},
2907 };
2908 
2909 static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2910 {
2911 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2912 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2913 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2914 
2915 	memset(req, 0, sizeof(*req));
2916 
2917 	/* Copy (half of) the key */
2918 	memcpy(req->state, ctx->ipad, SHA3_256_BLOCK_SIZE / 2);
2919 	/* Start of HMAC should have len == processed == blocksize */
2920 	req->len	= SHA3_256_BLOCK_SIZE;
2921 	req->processed	= SHA3_256_BLOCK_SIZE;
2922 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2923 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2924 	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2925 	req->digest_sz = SHA3_256_DIGEST_SIZE;
2926 	req->block_sz = SHA3_256_BLOCK_SIZE;
2927 	req->hmac = true;
2928 	ctx->do_fallback = false;
2929 	ctx->fb_init_done = false;
2930 	return 0;
2931 }
2932 
2933 static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2934 {
2935 	if (req->nbytes)
2936 		return safexcel_hmac_sha3_256_init(req) ?:
2937 		       safexcel_ahash_finup(req);
2938 
2939 	/* HW cannot do zero length HMAC, use fallback instead */
2940 	return safexcel_sha3_digest_fallback(req);
2941 }
2942 
2943 static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
2944 {
2945 	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
2946 }
2947 
2948 struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
2949 	.type = SAFEXCEL_ALG_TYPE_AHASH,
2950 	.algo_mask = SAFEXCEL_ALG_SHA3,
2951 	.alg.ahash = {
2952 		.init = safexcel_hmac_sha3_256_init,
2953 		.update = safexcel_sha3_update,
2954 		.final = safexcel_sha3_final,
2955 		.finup = safexcel_sha3_finup,
2956 		.digest = safexcel_hmac_sha3_256_digest,
2957 		.setkey = safexcel_hmac_sha3_setkey,
2958 		.export = safexcel_sha3_export,
2959 		.import = safexcel_sha3_import,
2960 		.halg = {
2961 			.digestsize = SHA3_256_DIGEST_SIZE,
2962 			.statesize = sizeof(struct safexcel_ahash_export_state),
2963 			.base = {
2964 				.cra_name = "hmac(sha3-256)",
2965 				.cra_driver_name = "safexcel-hmac-sha3-256",
2966 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2967 				.cra_flags = CRYPTO_ALG_ASYNC |
2968 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2969 					     CRYPTO_ALG_NEED_FALLBACK,
2970 				.cra_blocksize = SHA3_256_BLOCK_SIZE,
2971 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2972 				.cra_init = safexcel_hmac_sha3_256_cra_init,
2973 				.cra_exit = safexcel_hmac_sha3_cra_exit,
2974 				.cra_module = THIS_MODULE,
2975 			},
2976 		},
2977 	},
2978 };
2979 
2980 static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
2981 {
2982 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2983 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2984 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2985 
2986 	memset(req, 0, sizeof(*req));
2987 
2988 	/* Copy (half of) the key */
2989 	memcpy(req->state, ctx->ipad, SHA3_384_BLOCK_SIZE / 2);
2990 	/* Start of HMAC should have len == processed == blocksize */
2991 	req->len	= SHA3_384_BLOCK_SIZE;
2992 	req->processed	= SHA3_384_BLOCK_SIZE;
2993 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2994 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2995 	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
2996 	req->digest_sz = SHA3_384_DIGEST_SIZE;
2997 	req->block_sz = SHA3_384_BLOCK_SIZE;
2998 	req->hmac = true;
2999 	ctx->do_fallback = false;
3000 	ctx->fb_init_done = false;
3001 	return 0;
3002 }
3003 
3004 static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
3005 {
3006 	if (req->nbytes)
3007 		return safexcel_hmac_sha3_384_init(req) ?:
3008 		       safexcel_ahash_finup(req);
3009 
3010 	/* HW cannot do zero length HMAC, use fallback instead */
3011 	return safexcel_sha3_digest_fallback(req);
3012 }
3013 
3014 static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
3015 {
3016 	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
3017 }
3018 
3019 struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
3020 	.type = SAFEXCEL_ALG_TYPE_AHASH,
3021 	.algo_mask = SAFEXCEL_ALG_SHA3,
3022 	.alg.ahash = {
3023 		.init = safexcel_hmac_sha3_384_init,
3024 		.update = safexcel_sha3_update,
3025 		.final = safexcel_sha3_final,
3026 		.finup = safexcel_sha3_finup,
3027 		.digest = safexcel_hmac_sha3_384_digest,
3028 		.setkey = safexcel_hmac_sha3_setkey,
3029 		.export = safexcel_sha3_export,
3030 		.import = safexcel_sha3_import,
3031 		.halg = {
3032 			.digestsize = SHA3_384_DIGEST_SIZE,
3033 			.statesize = sizeof(struct safexcel_ahash_export_state),
3034 			.base = {
3035 				.cra_name = "hmac(sha3-384)",
3036 				.cra_driver_name = "safexcel-hmac-sha3-384",
3037 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
3038 				.cra_flags = CRYPTO_ALG_ASYNC |
3039 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
3040 					     CRYPTO_ALG_NEED_FALLBACK,
3041 				.cra_blocksize = SHA3_384_BLOCK_SIZE,
3042 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3043 				.cra_init = safexcel_hmac_sha3_384_cra_init,
3044 				.cra_exit = safexcel_hmac_sha3_cra_exit,
3045 				.cra_module = THIS_MODULE,
3046 			},
3047 		},
3048 	},
3049 };
3050 
3051 static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
3052 {
3053 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3054 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3055 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3056 
3057 	memset(req, 0, sizeof(*req));
3058 
3059 	/* Copy (half of) the key */
3060 	memcpy(req->state, ctx->ipad, SHA3_512_BLOCK_SIZE / 2);
3061 	/* Start of HMAC should have len == processed == blocksize */
3062 	req->len	= SHA3_512_BLOCK_SIZE;
3063 	req->processed	= SHA3_512_BLOCK_SIZE;
3064 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
3065 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3066 	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
3067 	req->digest_sz = SHA3_512_DIGEST_SIZE;
3068 	req->block_sz = SHA3_512_BLOCK_SIZE;
3069 	req->hmac = true;
3070 	ctx->do_fallback = false;
3071 	ctx->fb_init_done = false;
3072 	return 0;
3073 }
3074 
3075 static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
3076 {
3077 	if (req->nbytes)
3078 		return safexcel_hmac_sha3_512_init(req) ?:
3079 		       safexcel_ahash_finup(req);
3080 
3081 	/* HW cannot do zero length HMAC, use fallback instead */
3082 	return safexcel_sha3_digest_fallback(req);
3083 }
3084 
3085 static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
3086 {
3087 	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

3089 struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
3090 	.type = SAFEXCEL_ALG_TYPE_AHASH,
3091 	.algo_mask = SAFEXCEL_ALG_SHA3,
3092 	.alg.ahash = {
3093 		.init = safexcel_hmac_sha3_512_init,
3094 		.update = safexcel_sha3_update,
3095 		.final = safexcel_sha3_final,
3096 		.finup = safexcel_sha3_finup,
3097 		.digest = safexcel_hmac_sha3_512_digest,
3098 		.setkey = safexcel_hmac_sha3_setkey,
3099 		.export = safexcel_sha3_export,
3100 		.import = safexcel_sha3_import,
3101 		.halg = {
3102 			.digestsize = SHA3_512_DIGEST_SIZE,
3103 			.statesize = sizeof(struct safexcel_ahash_export_state),
3104 			.base = {
3105 				.cra_name = "hmac(sha3-512)",
3106 				.cra_driver_name = "safexcel-hmac-sha3-512",
3107 				.cra_priority = SAFEXCEL_CRA_PRIORITY,
3108 				.cra_flags = CRYPTO_ALG_ASYNC |
3109 					     CRYPTO_ALG_KERN_DRIVER_ONLY |
3110 					     CRYPTO_ALG_NEED_FALLBACK,
3111 				.cra_blocksize = SHA3_512_BLOCK_SIZE,
3112 				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3113 				.cra_init = safexcel_hmac_sha3_512_cra_init,
3114 				.cra_exit = safexcel_hmac_sha3_cra_exit,
3115 				.cra_module = THIS_MODULE,
3116 			},
3117 		},
3118 	},
3119 };
3120