1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (C) 2017 Marvell
4  *
5  * Antoine Tenart <antoine.tenart@free-electrons.com>
6  */
7 
8 #include <crypto/aes.h>
9 #include <crypto/hmac.h>
10 #include <crypto/md5.h>
11 #include <crypto/sha1.h>
12 #include <crypto/sha2.h>
13 #include <crypto/sha3.h>
14 #include <crypto/skcipher.h>
15 #include <crypto/sm3.h>
16 #include <linux/device.h>
17 #include <linux/dma-mapping.h>
18 #include <linux/dmapool.h>
19 
20 #include "safexcel.h"
21 
/* Per-transform (tfm) context for SafeXcel ahash operations */
struct safexcel_ahash_ctx {
	struct safexcel_context base;	/* common driver context (priv, ctxr, pads) */

	u32 alg;		/* CONTEXT_CONTROL_CRYPTO_ALG_* hash engine id */
	u8  key_sz;		/* key material size copied into the context record */
	bool cbcmac;		/* plain AES-CBCMAC mode (enables HW zero padding) */
	bool do_fallback;	/* NOTE(review): fallback routing flag — set elsewhere in this file */
	bool fb_init_done;	/* fallback tfm initialized — managed elsewhere in this file */
	bool fb_do_setkey;	/* fallback tfm still needs a setkey pass */

	struct crypto_cipher *kaes;	/* AES cipher, used e.g. for zero-length (X)CBC/CMAC */
	struct crypto_ahash *fback;	/* software fallback ahash */
	struct crypto_shash *shpre;	/* shash for key preprocessing (presumably; used elsewhere) */
	struct shash_desc *shdesc;	/* descriptor for @shpre */
};
37 
/* Per-request state for SafeXcel ahash operations */
struct safexcel_ahash_req {
	bool last_req;		/* force processing now (e.g. HMAC precompute) */
	bool finish;		/* final/finup requested: produce the digest */
	bool hmac;		/* this is an HMAC operation */
	bool needs_inv;		/* context record must be invalidated first */
	bool hmac_zlen;		/* special zero-length HMAC handling in flight */
	bool len_is_le;		/* length word is little endian (e.g. MD5) */
	bool not_first;		/* not the first request on this context */
	bool xcbcmac;		/* AES-XCBC/CMAC mode */

	int nents;		/* mapped src scatterlist entries (0 = unmapped) */
	dma_addr_t result_dma;	/* DMA address of @state for result output */

	u32 digest;		/* CONTEXT_CONTROL_DIGEST_* mode for this request */

	u8 state_sz;    /* expected state size, only set once */
	u8 block_sz;    /* block size, only set once */
	u8 digest_sz;   /* output digest size, only set once */
	/* intermediate/final hash state, sized for the largest algorithm */
	__le32 state[SHA3_512_BLOCK_SIZE /
		     sizeof(__le32)] __aligned(sizeof(__le32));

	u64 len;	/* total bytes accepted from the caller */
	u64 processed;	/* total bytes already sent to the engine */

	/* data accepted but not yet sent; DMA-mapped while a send is in flight */
	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
	dma_addr_t cache_dma;
	unsigned int cache_sz;

	/* staging buffer for bytes held back during a non-final send */
	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};
68 
69 static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
70 {
71 	return req->len - req->processed;
72 }
73 
74 static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
75 				u32 input_length, u32 result_length,
76 				bool cbcmac)
77 {
78 	struct safexcel_token *token =
79 		(struct safexcel_token *)cdesc->control_data.token;
80 
81 	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
82 	token[0].packet_length = input_length;
83 	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
84 
85 	input_length &= 15;
86 	if (unlikely(cbcmac && input_length)) {
87 		token[0].stat =  0;
88 		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
89 		token[1].packet_length = 16 - input_length;
90 		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
91 		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
92 	} else {
93 		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
94 		eip197_noop_token(&token[1]);
95 	}
96 
97 	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
98 	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
99 			EIP197_TOKEN_STAT_LAST_PACKET;
100 	token[2].packet_length = result_length;
101 	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
102 				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
103 
104 	eip197_noop_token(&token[3]);
105 }
106 
/*
 * safexcel_context_control - program the context control words for a request
 * @ctx: transform context (algorithm id, key size, precomputed pads)
 * @req: request state (digest mode, processed counters, mode flags)
 * @cdesc: first command descriptor of this request
 *
 * Fills in control0/control1 of @cdesc and, where required, copies the
 * (intermediate) digest state or key material into the engine context
 * record so the hardware can continue or finish the hash.
 */
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
				     struct safexcel_ahash_req *req,
				     struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	u64 count = 0;

	cdesc->control_data.control0 = ctx->alg;
	cdesc->control_data.control1 = 0;

	/*
	 * Copy the input digest if needed, and setup the context
	 * fields. Do this now as we need it to setup the first command
	 * descriptor.
	 */
	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
		/* XCM: context holds the key (XCBC/CMAC) or running state */
		if (req->xcbcmac)
			memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
		else
			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

		if (!req->finish && req->xcbcmac)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT  |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT  |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		return;
	} else if (!req->processed) {
		/* First - and possibly only - block of basic hash only */
		if (req->finish)
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH  |
				/* ensure its not 0! */
				CONTEXT_CONTROL_SIZE(1);
		else
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT  |
				CONTEXT_CONTROL_RESTART_HASH   |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				/* ensure its not 0! */
				CONTEXT_CONTROL_SIZE(1);
		return;
	}

	/* Hash continuation or HMAC, setup (inner) digest from state */
	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

	if (req->finish) {
		/* Compute digest count for hash/HMAC finish operations */
		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    req->hmac_zlen || (req->processed != req->block_sz)) {
			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

			/* This is a hardware limitation, as the
			 * counter must fit into an u32. This represents
			 * a fairly big amount of input data, so we
			 * shouldn't see this.
			 */
			if (unlikely(count & 0xffffffff00000000ULL)) {
				dev_warn(priv->dev,
					 "Input data is too big\n");
				return;
			}
		}

		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    /* Special case: zero length HMAC */
		    req->hmac_zlen ||
		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
		    (req->processed != req->block_sz)) {
			/* Basic hash continue operation, need digest + cnt */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
			/* For zero-len HMAC, don't finalize, already padded! */
			if (req->hmac_zlen)
				cdesc->control_data.control0 |=
					CONTEXT_CONTROL_NO_FINISH_HASH;
			cdesc->control_data.control1 |=
				CONTEXT_CONTROL_DIGEST_CNT;
			/* Block counter goes right after the hash state */
			ctx->base.ctxr->data[req->state_sz >> 2] =
				cpu_to_le32(count);
			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

			/* Clear zero-length HMAC flag for next operation! */
			req->hmac_zlen = false;
		} else { /* HMAC */
			/* Need outer digest for HMAC finalization */
			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
			       &ctx->base.opad, req->state_sz);

			/* Single pass HMAC - no digest count */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_HMAC;
		}
	} else { /* Hash continuation, do not finish yet */
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
			CONTEXT_CONTROL_TYPE_HASH_OUT |
			CONTEXT_CONTROL_NO_FINISH_HASH;
	}
}
222 
223 static int safexcel_ahash_enqueue(struct ahash_request *areq);
224 
/*
 * safexcel_handle_req_result - post-process a completed hash request
 *
 * Pops the result descriptor, releases all DMA mappings taken by the
 * send path, and either copies the digest to the caller's buffer or
 * re-enqueues the request to run the outer hash of a faked HMAC.
 * Returns 1 (one result consumed); *ret carries the error status and
 * *should_complete tells the caller whether to complete the request.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	u64 cache_len;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: result: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	/* Release the DMA mappings taken by safexcel_ahash_send_req() */
	if (sreq->nents) {
		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
		sreq->nents = 0;
	}

	if (sreq->result_dma) {
		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
				 DMA_FROM_DEVICE);
		sreq->result_dma = 0;
	}

	if (sreq->cache_dma) {
		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
				 DMA_TO_DEVICE);
		sreq->cache_dma = 0;
		sreq->cache_sz = 0;
	}

	if (sreq->finish) {
		if (sreq->hmac &&
		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
			/* Faking HMAC using hash - need to do outer hash */
			/* Inner digest becomes the input of the outer hash */
			memcpy(sreq->cache, sreq->state,
			       crypto_ahash_digestsize(ahash));

			/* Restart from the precomputed outer (key^opad) state */
			memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);

			sreq->len = sreq->block_sz +
				    crypto_ahash_digestsize(ahash);
			sreq->processed = sreq->block_sz;
			sreq->hmac = 0;

			if (priv->flags & EIP197_TRC_CACHE)
				ctx->base.needs_inv = true;
			areq->nbytes = 0;
			safexcel_ahash_enqueue(areq);

			*should_complete = false; /* Not done yet */
			return 1;
		}

		if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
			/* Undo final XOR with 0xffffffff ...*/
			*(__le32 *)areq->result = ~sreq->state[0];
		} else {
			memcpy(areq->result, sreq->state,
			       crypto_ahash_digestsize(ahash));
		}
	}

	/* Move bytes held back during send into the cache for next time */
	cache_len = safexcel_queued_len(sreq);
	if (cache_len)
		memcpy(sreq->cache, sreq->cache_next, cache_len);

	*should_complete = true;

	return 1;
}
309 
/*
 * safexcel_ahash_send_req - build and queue descriptors for a hash request
 *
 * Decides how much queued data to process now (holding back a partial -
 * or, for non-final requests, the last full - block), handles XCBC/CMAC
 * software padding and key compensation, DMA-maps the cache and the
 * source scatterlist, emits command descriptors plus one result
 * descriptor, and programs the context. On error all mappings and ring
 * positions taken so far are rolled back. Returns 0 on success with
 * *commands/*results set, or a negative error code.
 */
static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
				   int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc;
	struct scatterlist *sg;
	struct safexcel_token *dmmy;
	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
	u64 queued, len;

	queued = safexcel_queued_len(req);
	if (queued <= HASH_CACHE_SIZE)
		cache_len = queued;
	else
		cache_len = queued - areq->nbytes;

	if (!req->finish && !req->last_req) {
		/* If this is not the last request and the queued data does not
		 * fit into full cache blocks, cache it for the next send call.
		 */
		extra = queued & (HASH_CACHE_SIZE - 1);

		/* If this is not the last request and the queued data
		 * is a multiple of a block, cache the last one for now.
		 */
		if (!extra)
			extra = HASH_CACHE_SIZE;

		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache_next, extra,
				   areq->nbytes - extra);

		queued -= extra;

		if (!queued) {
			/* Everything was held back; nothing to send now */
			*commands = 0;
			*results = 0;
			return 0;
		}

		extra = 0;
	}

	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
			/*
			 * Cache contains less than 1 full block, complete.
			 */
			extra = AES_BLOCK_SIZE - cache_len;
			if (queued > cache_len) {
				/* More data follows: borrow bytes */
				u64 tmp = queued - cache_len;

				skip = min_t(u64, tmp, extra);
				sg_pcopy_to_buffer(areq->src,
					sg_nents(areq->src),
					req->cache + cache_len,
					skip, 0);
			}
			extra -= skip;
			/* Zero-fill whatever borrowing could not cover */
			memset(req->cache + cache_len + skip, 0, extra);
			if (!ctx->cbcmac && extra) {
				// 10- padding for XCBCMAC & CMAC
				req->cache[cache_len + skip] = 0x80;
				// HW will use K2 iso K3 - compensate!
				for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
					u32 *cache = (void *)req->cache;
					u32 *ipad = ctx->base.ipad.word;
					u32 x;

					x = ipad[i] ^ ipad[i + 4];
					cache[i] ^= swab(x);
				}
			}
			cache_len = AES_BLOCK_SIZE;
			queued = queued + extra;
		}

		/* XCBC continue: XOR previous result into 1st word */
		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
	}

	len = queued;
	/* Add a command descriptor for the cached data, if any */
	if (cache_len) {
		req->cache_dma = dma_map_single(priv->dev, req->cache,
						cache_len, DMA_TO_DEVICE);
		if (dma_mapping_error(priv->dev, req->cache_dma))
			return -EINVAL;

		req->cache_sz = cache_len;
		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
						 (cache_len == len),
						 req->cache_dma, cache_len,
						 len, ctx->base.ctxr_dma,
						 &dmmy);
		if (IS_ERR(first_cdesc)) {
			ret = PTR_ERR(first_cdesc);
			goto unmap_cache;
		}
		n_cdesc++;

		queued -= cache_len;
		if (!queued)
			goto send_command;
	}

	/* Now handle the current ahash request buffer(s) */
	req->nents = dma_map_sg(priv->dev, areq->src,
				sg_nents_for_len(areq->src,
						 areq->nbytes),
				DMA_TO_DEVICE);
	if (!req->nents) {
		ret = -ENOMEM;
		goto cdesc_rollback;
	}

	for_each_sg(areq->src, sg, req->nents, i) {
		int sglen = sg_dma_len(sg);

		/* Skip bytes already consumed by the cache borrow above */
		if (unlikely(sglen <= skip)) {
			skip -= sglen;
			continue;
		}

		/* Do not overflow the request */
		if ((queued + skip) <= sglen)
			sglen = queued;
		else
			sglen -= skip;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - sglen),
					   sg_dma_address(sg) + skip, sglen,
					   len, ctx->base.ctxr_dma, &dmmy);
		if (IS_ERR(cdesc)) {
			ret = PTR_ERR(cdesc);
			goto unmap_sg;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;
		n_cdesc++;

		queued -= sglen;
		if (!queued)
			break;
		skip = 0;
	}

send_command:
	/* Setup the context options */
	safexcel_context_control(ctx, req, first_cdesc);

	/* Add the token */
	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
					 DMA_FROM_DEVICE);
	if (dma_mapping_error(priv->dev, req->result_dma)) {
		ret = -EINVAL;
		goto unmap_sg;
	}

	/* Add a result descriptor */
	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
				   req->digest_sz);
	if (IS_ERR(rdesc)) {
		ret = PTR_ERR(rdesc);
		goto unmap_result;
	}

	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

	/* Account for what was really sent (minus synthesized padding) */
	req->processed += len - extra;

	*commands = n_cdesc;
	*results = 1;
	return 0;

unmap_result:
	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
			 DMA_FROM_DEVICE);
unmap_sg:
	if (req->nents) {
		dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
		req->nents = 0;
	}
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
	if (req->cache_dma) {
		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
				 DMA_TO_DEVICE);
		req->cache_dma = 0;
		req->cache_sz = 0;
	}

	return ret;
}
515 
/*
 * safexcel_handle_inv_result - post-process a context invalidation
 *
 * Pops and checks the result descriptor. If the transform is being
 * torn down (exit_inv), the context record is freed and the request
 * completed. Otherwise the original request that triggered the
 * invalidation is re-enqueued on a freshly selected ring.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	int enq_ret;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: invalidate: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		/* Transform teardown: release the context record for good */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;
		return 1;
	}

	/* Re-dispatch the original request on a (possibly new) ring */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return 1;
}
565 
566 static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
567 				  struct crypto_async_request *async,
568 				  bool *should_complete, int *ret)
569 {
570 	struct ahash_request *areq = ahash_request_cast(async);
571 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
572 	int err;
573 
574 	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);
575 
576 	if (req->needs_inv) {
577 		req->needs_inv = false;
578 		err = safexcel_handle_inv_result(priv, ring, async,
579 						 should_complete, ret);
580 	} else {
581 		err = safexcel_handle_req_result(priv, ring, async,
582 						 should_complete, ret);
583 	}
584 
585 	return err;
586 }
587 
588 static int safexcel_ahash_send_inv(struct crypto_async_request *async,
589 				   int ring, int *commands, int *results)
590 {
591 	struct ahash_request *areq = ahash_request_cast(async);
592 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
593 	int ret;
594 
595 	ret = safexcel_invalidate_cache(async, ctx->base.priv,
596 					ctx->base.ctxr_dma, ring);
597 	if (unlikely(ret))
598 		return ret;
599 
600 	*commands = 1;
601 	*results = 1;
602 
603 	return 0;
604 }
605 
606 static int safexcel_ahash_send(struct crypto_async_request *async,
607 			       int ring, int *commands, int *results)
608 {
609 	struct ahash_request *areq = ahash_request_cast(async);
610 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
611 	int ret;
612 
613 	if (req->needs_inv)
614 		ret = safexcel_ahash_send_inv(async, ring, commands, results);
615 	else
616 		ret = safexcel_ahash_send_req(async, ring, commands, results);
617 
618 	return ret;
619 }
620 
/*
 * safexcel_ahash_exit_inv - synchronously invalidate a context record
 *
 * Builds an on-stack invalidation request, queues it on the transform's
 * current ring and blocks until the engine has processed it. Used at
 * transform teardown when the engine has a record cache (TRC).
 */
static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
	struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
	struct safexcel_inv_result result = {};
	int ring = ctx->base.ring;

	memset(req, 0, EIP197_AHASH_REQ_SIZE);

	/* create invalidation request */
	init_completion(&result.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_inv_complete, &result);

	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
	ctx = crypto_tfm_ctx(req->base.tfm);
	ctx->base.exit_inv = true;	/* free the record when done */
	rctx->needs_inv = true;		/* route through the inv send path */

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Block until the completion callback fires */
	wait_for_completion(&result.completion);

	if (result.error) {
		dev_warn(priv->dev, "hash: completion error (%d)\n",
			 result.error);
		return result.error;
	}

	return 0;
}
659 
660 /* safexcel_ahash_cache: cache data until at least one request can be sent to
661  * the engine, aka. when there is at least 1 block size in the pipe.
662  */
663 static int safexcel_ahash_cache(struct ahash_request *areq)
664 {
665 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
666 	u64 cache_len;
667 
668 	/* cache_len: everything accepted by the driver but not sent yet,
669 	 * tot sz handled by update() - last req sz - tot sz handled by send()
670 	 */
671 	cache_len = safexcel_queued_len(req);
672 
673 	/*
674 	 * In case there isn't enough bytes to proceed (less than a
675 	 * block size), cache the data until we have enough.
676 	 */
677 	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
678 		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
679 				   req->cache + cache_len,
680 				   areq->nbytes, 0);
681 		return 0;
682 	}
683 
684 	/* We couldn't cache all the data */
685 	return -E2BIG;
686 }
687 
/*
 * safexcel_ahash_enqueue - queue a request towards the engine
 *
 * Allocates the engine context record on first use, decides whether the
 * record cache must be invalidated before this request runs, then
 * enqueues the request on the transform's ring and kicks its worker.
 */
static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	req->needs_inv = false;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
		     /* invalidate for *any* non-XCBC continuation */
		   ((req->not_first && !req->xcbcmac) ||
		     /* invalidate if (i)digest changed */
		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
		     /* invalidate for HMAC finish with odigest changed */
		     (req->finish && req->hmac &&
		      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
			     &ctx->base.opad, req->state_sz))))
			/*
			 * We're still setting needs_inv here, even though it is
			 * cleared right away, because the needs_inv flag can be
			 * set in other functions and we want to keep the same
			 * logic.
			 */
			ctx->base.needs_inv = true;

		if (ctx->base.needs_inv) {
			/* Transfer the flag to this specific request */
			ctx->base.needs_inv = false;
			req->needs_inv = true;
		}
	} else {
		/* First request on this tfm: allocate a context record */
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(areq->base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}
	req->not_first = true;

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
740 
741 static int safexcel_ahash_update(struct ahash_request *areq)
742 {
743 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
744 	int ret;
745 
746 	/* If the request is 0 length, do nothing */
747 	if (!areq->nbytes)
748 		return 0;
749 
750 	/* Add request to the cache if it fits */
751 	ret = safexcel_ahash_cache(areq);
752 
753 	/* Update total request length */
754 	req->len += areq->nbytes;
755 
756 	/* If not all data could fit into the cache, go process the excess.
757 	 * Also go process immediately for an HMAC IV precompute, which
758 	 * will never be finished at all, but needs to be processed anyway.
759 	 */
760 	if ((ret && !req->finish) || req->last_req)
761 		return safexcel_ahash_enqueue(areq);
762 
763 	return 0;
764 }
765 
/*
 * safexcel_ahash_final - ahash .final: produce the digest
 *
 * Handles all the cases the hardware cannot: zero-length plain hashes
 * (precomputed results), zero-length CRC32/CBCMAC/XCBC/CMAC, and
 * zero-length HMAC (software-generated pad block). Everything else is
 * enqueued towards the engine with req->finish set.
 */
static int safexcel_ahash_final(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

	req->finish = true;

	if (unlikely(!req->len && !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *hash* request:
		 * The HW cannot do 0 length hash, so we provide the correct
		 * result directly here.
		 */
		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
			memcpy(areq->result, md5_zero_message_hash,
			       MD5_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
			memcpy(areq->result, sha1_zero_message_hash,
			       SHA1_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
			memcpy(areq->result, sha224_zero_message_hash,
			       SHA224_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
			memcpy(areq->result, sha256_zero_message_hash,
			       SHA256_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
			memcpy(areq->result, sha384_zero_message_hash,
			       SHA384_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
			memcpy(areq->result, sha512_zero_message_hash,
			       SHA512_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
			memcpy(areq->result,
			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
		}

		return 0;
	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
			    req->len == sizeof(u32) && !areq->nbytes)) {
		/* Zero length CRC32 */
		memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
		return 0;
	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length CBC MAC */
		memset(areq->result, 0, AES_BLOCK_SIZE);
		return 0;
	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length (X)CBC/CMAC */
		int i;

		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			u32 *result = (void *)areq->result;

			/* K3 */
			result[i] = swab(ctx->base.ipad.word[i + 4]);
		}
		areq->result[0] ^= 0x80;			// 10- padding
		/* Encrypt the padded K3 block to get the final MAC */
		crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
		return 0;
	} else if (unlikely(req->hmac &&
			    (req->len == req->block_sz) &&
			    !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *HMAC* request:
		 * For HMAC, we need to finalize the inner digest
		 * and then perform the outer hash.
		 */

		/* generate pad block in the cache */
		/* start with a hash block of all zeroes */
		memset(req->cache, 0, req->block_sz);
		/* set the first byte to 0x80 to 'append a 1 bit' */
		req->cache[0] = 0x80;
		/* add the length in bits in the last 2 bytes */
		if (req->len_is_le) {
			/* Little endian length word (e.g. MD5) */
			req->cache[req->block_sz-8] = (req->block_sz << 3) &
						      255;
			req->cache[req->block_sz-7] = (req->block_sz >> 5);
		} else {
			/* Big endian length word (e.g. any SHA) */
			req->cache[req->block_sz-2] = (req->block_sz >> 5);
			req->cache[req->block_sz-1] = (req->block_sz << 3) &
						      255;
		}

		req->len += req->block_sz; /* plus 1 hash block */

		/* Set special zero-length HMAC flag */
		req->hmac_zlen = true;

		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	} else if (req->hmac) {
		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	}

	return safexcel_ahash_enqueue(areq);
}
869 
/*
 * ahash .finup: process remaining data and produce the digest.
 * The update() return value is intentionally ignored: with finish set
 * any queued data is flushed by final() via the enqueue path anyway.
 */
static int safexcel_ahash_finup(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	req->finish = true;

	safexcel_ahash_update(areq);
	return safexcel_ahash_final(areq);
}
879 
880 static int safexcel_ahash_export(struct ahash_request *areq, void *out)
881 {
882 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
883 	struct safexcel_ahash_export_state *export = out;
884 
885 	export->len = req->len;
886 	export->processed = req->processed;
887 
888 	export->digest = req->digest;
889 
890 	memcpy(export->state, req->state, req->state_sz);
891 	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);
892 
893 	return 0;
894 }
895 
896 static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
897 {
898 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
899 	const struct safexcel_ahash_export_state *export = in;
900 	int ret;
901 
902 	ret = crypto_ahash_init(areq);
903 	if (ret)
904 		return ret;
905 
906 	req->len = export->len;
907 	req->processed = export->processed;
908 
909 	req->digest = export->digest;
910 
911 	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
912 	memcpy(req->state, export->state, req->state_sz);
913 
914 	return 0;
915 }
916 
917 static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
918 {
919 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
920 	struct safexcel_alg_template *tmpl =
921 		container_of(__crypto_ahash_alg(tfm->__crt_alg),
922 			     struct safexcel_alg_template, alg.ahash);
923 
924 	ctx->base.priv = tmpl->priv;
925 	ctx->base.send = safexcel_ahash_send;
926 	ctx->base.handle_result = safexcel_handle_result;
927 	ctx->fb_do_setkey = false;
928 
929 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
930 				 sizeof(struct safexcel_ahash_req));
931 	return 0;
932 }
933 
934 static int safexcel_sha1_init(struct ahash_request *areq)
935 {
936 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
937 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
938 
939 	memset(req, 0, sizeof(*req));
940 
941 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
942 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
943 	req->state_sz = SHA1_DIGEST_SIZE;
944 	req->digest_sz = SHA1_DIGEST_SIZE;
945 	req->block_sz = SHA1_BLOCK_SIZE;
946 
947 	return 0;
948 }
949 
/* ahash .digest for plain SHA1: init followed by finup */
static int safexcel_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha1_init(areq);

	return ret ? ret : safexcel_ahash_finup(areq);
}
959 
960 static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
961 {
962 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
963 	struct safexcel_crypto_priv *priv = ctx->base.priv;
964 	int ret;
965 
966 	/* context not allocated, skip invalidation */
967 	if (!ctx->base.ctxr)
968 		return;
969 
970 	if (priv->flags & EIP197_TRC_CACHE) {
971 		ret = safexcel_ahash_exit_inv(tfm);
972 		if (ret)
973 			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
974 	} else {
975 		dma_pool_free(priv->context_pool, ctx->base.ctxr,
976 			      ctx->base.ctxr_dma);
977 	}
978 }
979 
/* Algorithm template for hardware-accelerated plain SHA1 */
struct safexcel_alg_template safexcel_alg_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha1_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "safexcel-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1010 
1011 static int safexcel_hmac_sha1_init(struct ahash_request *areq)
1012 {
1013 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1014 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1015 
1016 	memset(req, 0, sizeof(*req));
1017 
1018 	/* Start from ipad precompute */
1019 	memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
1020 	/* Already processed the key^ipad part now! */
1021 	req->len	= SHA1_BLOCK_SIZE;
1022 	req->processed	= SHA1_BLOCK_SIZE;
1023 
1024 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1025 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1026 	req->state_sz = SHA1_DIGEST_SIZE;
1027 	req->digest_sz = SHA1_DIGEST_SIZE;
1028 	req->block_sz = SHA1_BLOCK_SIZE;
1029 	req->hmac = true;
1030 
1031 	return 0;
1032 }
1033 
static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_hmac_sha1_init(areq) ?: safexcel_ahash_finup(areq);
}
1043 
/*
 * Completion bookkeeping used to drive an asynchronous ahash request
 * synchronously during HMAC precompute generation.
 */
struct safexcel_ahash_result {
	struct completion completion;
	int error;
};
1048 
1049 static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
1050 {
1051 	struct safexcel_ahash_result *result = req->data;
1052 
1053 	if (error == -EINPROGRESS)
1054 		return;
1055 
1056 	result->error = error;
1057 	complete(&result->completion);
1058 }
1059 
1060 static int safexcel_hmac_init_pad(struct ahash_request *areq,
1061 				  unsigned int blocksize, const u8 *key,
1062 				  unsigned int keylen, u8 *ipad, u8 *opad)
1063 {
1064 	struct safexcel_ahash_result result;
1065 	struct scatterlist sg;
1066 	int ret, i;
1067 	u8 *keydup;
1068 
1069 	if (keylen <= blocksize) {
1070 		memcpy(ipad, key, keylen);
1071 	} else {
1072 		keydup = kmemdup(key, keylen, GFP_KERNEL);
1073 		if (!keydup)
1074 			return -ENOMEM;
1075 
1076 		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1077 					   safexcel_ahash_complete, &result);
1078 		sg_init_one(&sg, keydup, keylen);
1079 		ahash_request_set_crypt(areq, &sg, ipad, keylen);
1080 		init_completion(&result.completion);
1081 
1082 		ret = crypto_ahash_digest(areq);
1083 		if (ret == -EINPROGRESS || ret == -EBUSY) {
1084 			wait_for_completion_interruptible(&result.completion);
1085 			ret = result.error;
1086 		}
1087 
1088 		/* Avoid leaking */
1089 		kfree_sensitive(keydup);
1090 
1091 		if (ret)
1092 			return ret;
1093 
1094 		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
1095 	}
1096 
1097 	memset(ipad + keylen, 0, blocksize - keylen);
1098 	memcpy(opad, ipad, blocksize);
1099 
1100 	for (i = 0; i < blocksize; i++) {
1101 		ipad[i] ^= HMAC_IPAD_VALUE;
1102 		opad[i] ^= HMAC_OPAD_VALUE;
1103 	}
1104 
1105 	return 0;
1106 }
1107 
1108 static int safexcel_hmac_init_iv(struct ahash_request *areq,
1109 				 unsigned int blocksize, u8 *pad, void *state)
1110 {
1111 	struct safexcel_ahash_result result;
1112 	struct safexcel_ahash_req *req;
1113 	struct scatterlist sg;
1114 	int ret;
1115 
1116 	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1117 				   safexcel_ahash_complete, &result);
1118 	sg_init_one(&sg, pad, blocksize);
1119 	ahash_request_set_crypt(areq, &sg, pad, blocksize);
1120 	init_completion(&result.completion);
1121 
1122 	ret = crypto_ahash_init(areq);
1123 	if (ret)
1124 		return ret;
1125 
1126 	req = ahash_request_ctx(areq);
1127 	req->hmac = true;
1128 	req->last_req = true;
1129 
1130 	ret = crypto_ahash_update(areq);
1131 	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1132 		return ret;
1133 
1134 	wait_for_completion_interruptible(&result.completion);
1135 	if (result.error)
1136 		return result.error;
1137 
1138 	return crypto_ahash_export(areq, state);
1139 }
1140 
/*
 * Compute the HMAC inner and outer hash precomputes (@istate/@ostate) for
 * @key, using the engine's own basic hash implementation named by @alg.
 * Allocates a temporary transform and request; all resources are released
 * before returning. Returns 0 on success or a negative error code.
 */
static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
				  unsigned int keylen,
				  void *istate, void *ostate)
{
	struct ahash_request *areq;
	struct crypto_ahash *tfm;
	unsigned int blocksize;
	u8 *ipad, *opad;
	int ret;

	tfm = crypto_alloc_ahash(alg, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	areq = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!areq) {
		ret = -ENOMEM;
		goto free_ahash;
	}

	crypto_ahash_clear_flags(tfm, ~0);
	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

	/* Single allocation holds both pads: ipad first, opad right after */
	ipad = kcalloc(2, blocksize, GFP_KERNEL);
	if (!ipad) {
		ret = -ENOMEM;
		goto free_request;
	}

	opad = ipad + blocksize;

	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
	if (ret)
		goto free_ipad;

	/* Hash key^ipad and key^opad blocks, exporting the mid-states */
	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
	kfree(ipad);
free_request:
	ahash_request_free(areq);
free_ahash:
	crypto_free_ahash(tfm);

	return ret;
}
1191 
1192 int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
1193 			 unsigned int keylen, const char *alg,
1194 			 unsigned int state_sz)
1195 {
1196 	struct safexcel_crypto_priv *priv = base->priv;
1197 	struct safexcel_ahash_export_state istate, ostate;
1198 	int ret;
1199 
1200 	ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
1201 	if (ret)
1202 		return ret;
1203 
1204 	if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
1205 	    (memcmp(&base->ipad, istate.state, state_sz) ||
1206 	     memcmp(&base->opad, ostate.state, state_sz)))
1207 		base->needs_inv = true;
1208 
1209 	memcpy(&base->ipad, &istate.state, state_sz);
1210 	memcpy(&base->opad, &ostate.state, state_sz);
1211 
1212 	return 0;
1213 }
1214 
/* Thin adapter: resolve the driver context and forward to the common setkey */
static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen, const char *alg,
				    unsigned int state_sz)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
}
1223 
/* HMAC-SHA1 setkey: precomputes derived via the driver's own SHA1 */
static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
					SHA1_DIGEST_SIZE);
}
1230 
/* HMAC-SHA1 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_hmac_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha1_digest,
		.setkey = safexcel_hmac_sha1_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "safexcel-hmac-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1262 
1263 static int safexcel_sha256_init(struct ahash_request *areq)
1264 {
1265 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1266 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1267 
1268 	memset(req, 0, sizeof(*req));
1269 
1270 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1271 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1272 	req->state_sz = SHA256_DIGEST_SIZE;
1273 	req->digest_sz = SHA256_DIGEST_SIZE;
1274 	req->block_sz = SHA256_BLOCK_SIZE;
1275 
1276 	return 0;
1277 }
1278 
static int safexcel_sha256_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_sha256_init(areq) ?: safexcel_ahash_finup(areq);
}
1288 
/* Plain SHA256 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha256_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "safexcel-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1319 
1320 static int safexcel_sha224_init(struct ahash_request *areq)
1321 {
1322 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1323 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1324 
1325 	memset(req, 0, sizeof(*req));
1326 
1327 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1328 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1329 	req->state_sz = SHA256_DIGEST_SIZE;
1330 	req->digest_sz = SHA256_DIGEST_SIZE;
1331 	req->block_sz = SHA256_BLOCK_SIZE;
1332 
1333 	return 0;
1334 }
1335 
static int safexcel_sha224_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_sha224_init(areq) ?: safexcel_ahash_finup(areq);
}
1345 
/* Plain SHA224 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha224_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "safexcel-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1376 
/* HMAC-SHA224 setkey: state is full SHA256-sized, hence SHA256_DIGEST_SIZE */
static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
					SHA256_DIGEST_SIZE);
}
1383 
1384 static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1385 {
1386 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1387 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1388 
1389 	memset(req, 0, sizeof(*req));
1390 
1391 	/* Start from ipad precompute */
1392 	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1393 	/* Already processed the key^ipad part now! */
1394 	req->len	= SHA256_BLOCK_SIZE;
1395 	req->processed	= SHA256_BLOCK_SIZE;
1396 
1397 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1398 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1399 	req->state_sz = SHA256_DIGEST_SIZE;
1400 	req->digest_sz = SHA256_DIGEST_SIZE;
1401 	req->block_sz = SHA256_BLOCK_SIZE;
1402 	req->hmac = true;
1403 
1404 	return 0;
1405 }
1406 
static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_hmac_sha224_init(areq) ?: safexcel_ahash_finup(areq);
}
1416 
/* HMAC-SHA224 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha224_digest,
		.setkey = safexcel_hmac_sha224_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "safexcel-hmac-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1448 
/* HMAC-SHA256 setkey: precomputes derived via the driver's own SHA256 */
static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
					SHA256_DIGEST_SIZE);
}
1455 
1456 static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1457 {
1458 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1459 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1460 
1461 	memset(req, 0, sizeof(*req));
1462 
1463 	/* Start from ipad precompute */
1464 	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1465 	/* Already processed the key^ipad part now! */
1466 	req->len	= SHA256_BLOCK_SIZE;
1467 	req->processed	= SHA256_BLOCK_SIZE;
1468 
1469 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1470 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1471 	req->state_sz = SHA256_DIGEST_SIZE;
1472 	req->digest_sz = SHA256_DIGEST_SIZE;
1473 	req->block_sz = SHA256_BLOCK_SIZE;
1474 	req->hmac = true;
1475 
1476 	return 0;
1477 }
1478 
static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_hmac_sha256_init(areq) ?: safexcel_ahash_finup(areq);
}
1488 
/* HMAC-SHA256 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha256_digest,
		.setkey = safexcel_hmac_sha256_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "safexcel-hmac-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1520 
1521 static int safexcel_sha512_init(struct ahash_request *areq)
1522 {
1523 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1524 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1525 
1526 	memset(req, 0, sizeof(*req));
1527 
1528 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1529 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1530 	req->state_sz = SHA512_DIGEST_SIZE;
1531 	req->digest_sz = SHA512_DIGEST_SIZE;
1532 	req->block_sz = SHA512_BLOCK_SIZE;
1533 
1534 	return 0;
1535 }
1536 
static int safexcel_sha512_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_sha512_init(areq) ?: safexcel_ahash_finup(areq);
}
1546 
/* Plain SHA512 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha512_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "safexcel-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1577 
1578 static int safexcel_sha384_init(struct ahash_request *areq)
1579 {
1580 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1581 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1582 
1583 	memset(req, 0, sizeof(*req));
1584 
1585 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1586 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1587 	req->state_sz = SHA512_DIGEST_SIZE;
1588 	req->digest_sz = SHA512_DIGEST_SIZE;
1589 	req->block_sz = SHA512_BLOCK_SIZE;
1590 
1591 	return 0;
1592 }
1593 
static int safexcel_sha384_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_sha384_init(areq) ?: safexcel_ahash_finup(areq);
}
1603 
/* Plain SHA384 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha384_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "safexcel-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1634 
/* HMAC-SHA512 setkey: precomputes derived via the driver's own SHA512 */
static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
					SHA512_DIGEST_SIZE);
}
1641 
1642 static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1643 {
1644 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1645 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1646 
1647 	memset(req, 0, sizeof(*req));
1648 
1649 	/* Start from ipad precompute */
1650 	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1651 	/* Already processed the key^ipad part now! */
1652 	req->len	= SHA512_BLOCK_SIZE;
1653 	req->processed	= SHA512_BLOCK_SIZE;
1654 
1655 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1656 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1657 	req->state_sz = SHA512_DIGEST_SIZE;
1658 	req->digest_sz = SHA512_DIGEST_SIZE;
1659 	req->block_sz = SHA512_BLOCK_SIZE;
1660 	req->hmac = true;
1661 
1662 	return 0;
1663 }
1664 
static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_hmac_sha512_init(areq) ?: safexcel_ahash_finup(areq);
}
1674 
/* HMAC-SHA512 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha512_digest,
		.setkey = safexcel_hmac_sha512_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "safexcel-hmac-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1706 
/* HMAC-SHA384 setkey: state is full SHA512-sized, hence SHA512_DIGEST_SIZE */
static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
					SHA512_DIGEST_SIZE);
}
1713 
1714 static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1715 {
1716 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1717 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1718 
1719 	memset(req, 0, sizeof(*req));
1720 
1721 	/* Start from ipad precompute */
1722 	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1723 	/* Already processed the key^ipad part now! */
1724 	req->len	= SHA512_BLOCK_SIZE;
1725 	req->processed	= SHA512_BLOCK_SIZE;
1726 
1727 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1728 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1729 	req->state_sz = SHA512_DIGEST_SIZE;
1730 	req->digest_sz = SHA512_DIGEST_SIZE;
1731 	req->block_sz = SHA512_BLOCK_SIZE;
1732 	req->hmac = true;
1733 
1734 	return 0;
1735 }
1736 
static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_hmac_sha384_init(areq) ?: safexcel_ahash_finup(areq);
}
1746 
/* HMAC-SHA384 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha384_digest,
		.setkey = safexcel_hmac_sha384_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "safexcel-hmac-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1778 
1779 static int safexcel_md5_init(struct ahash_request *areq)
1780 {
1781 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1782 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1783 
1784 	memset(req, 0, sizeof(*req));
1785 
1786 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1787 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1788 	req->state_sz = MD5_DIGEST_SIZE;
1789 	req->digest_sz = MD5_DIGEST_SIZE;
1790 	req->block_sz = MD5_HMAC_BLOCK_SIZE;
1791 
1792 	return 0;
1793 }
1794 
static int safexcel_md5_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_md5_init(areq) ?: safexcel_ahash_finup(areq);
}
1804 
/* Plain MD5 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_md5_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "safexcel-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1835 
1836 static int safexcel_hmac_md5_init(struct ahash_request *areq)
1837 {
1838 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1839 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1840 
1841 	memset(req, 0, sizeof(*req));
1842 
1843 	/* Start from ipad precompute */
1844 	memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
1845 	/* Already processed the key^ipad part now! */
1846 	req->len	= MD5_HMAC_BLOCK_SIZE;
1847 	req->processed	= MD5_HMAC_BLOCK_SIZE;
1848 
1849 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1850 	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1851 	req->state_sz = MD5_DIGEST_SIZE;
1852 	req->digest_sz = MD5_DIGEST_SIZE;
1853 	req->block_sz = MD5_HMAC_BLOCK_SIZE;
1854 	req->len_is_le = true; /* MD5 is little endian! ... */
1855 	req->hmac = true;
1856 
1857 	return 0;
1858 }
1859 
/* HMAC-MD5 setkey: precomputes derived via the driver's own MD5 */
static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
					MD5_DIGEST_SIZE);
}
1866 
static int safexcel_hmac_md5_digest(struct ahash_request *areq)
{
	/* One-shot digest: init, then hash + finalize in a single pass */
	return safexcel_hmac_md5_init(areq) ?: safexcel_ahash_finup(areq);
}
1876 
/* HMAC-MD5 ahash, offloaded to the EIP97/EIP197 hash engine */
struct safexcel_alg_template safexcel_alg_hmac_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_hmac_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_md5_digest,
		.setkey = safexcel_hmac_md5_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "safexcel-hmac-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1908 
/*
 * Transform init for crc32: perform the common ahash init, then preset
 * the 'key' (the CRC starting value stored in ipad) to all zeroes, since
 * crc32 is flagged CRYPTO_ALG_OPTIONAL_KEY and setkey may never be called.
 */
static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret = safexcel_ahash_cra_init(tfm);

	/* Default 'key' is all zeroes */
	memset(&ctx->base.ipad, 0, sizeof(u32));
	return ret;
}
1918 
/*
 * Request init for crc32: seed the state from the loaded key and select
 * the engine's XCM digest mode for this 4-byte "hash".
 */
static int safexcel_crc32_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/*
	 * Start from loaded key, bit-inverted — the engine apparently works
	 * on the inverted CRC residue (note the ~); confirm against the
	 * engine documentation if changing this.
	 */
	req->state[0]	= cpu_to_le32(~ctx->base.ipad.word[0]);
	/* Set processed to non-zero to enable invalidation detection */
	req->len	= sizeof(u32);
	req->processed	= sizeof(u32);

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = sizeof(u32);
	req->digest_sz = sizeof(u32);
	req->block_sz = sizeof(u32);

	return 0;
}
1940 
1941 static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1942 				 unsigned int keylen)
1943 {
1944 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1945 
1946 	if (keylen != sizeof(u32))
1947 		return -EINVAL;
1948 
1949 	memcpy(&ctx->base.ipad, key, sizeof(u32));
1950 	return 0;
1951 }
1952 
static int safexcel_crc32_digest(struct ahash_request *areq)
{
	int ret = safexcel_crc32_init(areq);

	/* One-shot digest: init, then hash + finalize in a single pass */
	return ret ? ret : safexcel_ahash_finup(areq);
}
1957 
/* crc32 as a keyed 4-byte "hash", offloaded to the EIP97/EIP197 engine */
struct safexcel_alg_template safexcel_alg_crc32 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_crc32_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_crc32_digest,
		.setkey = safexcel_crc32_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = sizeof(u32),
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "crc32",
				.cra_driver_name = "safexcel-crc32",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_crc32_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
1990 
/*
 * Request init for AES-CBCMAC (also shared by the XCBC/CMAC variants):
 * load the key material prepared by setkey into the request state and
 * select the engine's XCM digest mode.
 */
static int safexcel_cbcmac_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded keys */
	memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
	/* Set processed to non-zero to enable invalidation detection */
	req->len	= AES_BLOCK_SIZE;
	req->processed	= AES_BLOCK_SIZE;

	req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = ctx->key_sz;	/* state size follows the key size */
	req->digest_sz = AES_BLOCK_SIZE;
	req->block_sz = AES_BLOCK_SIZE;
	req->xcbcmac  = true;

	return 0;
}
2012 
/*
 * cbcmac(aes) setkey: validate the key via aes_expandkey(), then store
 * the raw key big-endian at word offset 8 of the ipad buffer, i.e. two
 * AES blocks in — the leading 2 blocks (the subkey area used by
 * XCBC/CMAC) are zeroed for plain CBCMAC.
 */
static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				 unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
	for (i = 0; i < len / sizeof(u32); i++)
		ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);

	/* Select engine algorithm and context size from the AES key size */
	if (len == AES_KEYSIZE_192) {
		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac  = true;

	/* Don't leave the expanded round keys on the stack */
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2043 
static int safexcel_cbcmac_digest(struct ahash_request *areq)
{
	int ret;

	/* One-shot MAC: set up the request, then process everything */
	ret = safexcel_cbcmac_init(areq);
	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}
2048 
/* cbcmac(aes) ahash template, all AES key sizes (see cbcmac setkey) */
struct safexcel_alg_template safexcel_alg_cbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cbcmac(aes)",
				.cra_driver_name = "safexcel-cbcmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2080 
2081 static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2082 				 unsigned int len)
2083 {
2084 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2085 	struct crypto_aes_ctx aes;
2086 	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2087 	int ret, i;
2088 
2089 	ret = aes_expandkey(&aes, key, len);
2090 	if (ret)
2091 		return ret;
2092 
2093 	/* precompute the XCBC key material */
2094 	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2095 	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2096 				CRYPTO_TFM_REQ_MASK);
2097 	ret = crypto_cipher_setkey(ctx->kaes, key, len);
2098 	if (ret)
2099 		return ret;
2100 
2101 	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2102 		"\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2103 	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2104 		"\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2105 	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2106 		"\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2107 	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2108 		ctx->base.ipad.word[i] = swab(key_tmp[i]);
2109 
2110 	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2111 	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2112 				CRYPTO_TFM_REQ_MASK);
2113 	ret = crypto_cipher_setkey(ctx->kaes,
2114 				   (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2115 				   AES_MIN_KEY_SIZE);
2116 	if (ret)
2117 		return ret;
2118 
2119 	ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2120 	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2121 	ctx->cbcmac = false;
2122 
2123 	memzero_explicit(&aes, sizeof(aes));
2124 	return 0;
2125 }
2126 
2127 static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2128 {
2129 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2130 
2131 	safexcel_ahash_cra_init(tfm);
2132 	ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2133 	return PTR_ERR_OR_ZERO(ctx->kaes);
2134 }
2135 
/* Release the subkey-derivation cipher, then run the common teardown */
static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->kaes);
	safexcel_ahash_cra_exit(tfm);
}
2143 
/* xcbc(aes) ahash template; shares init/digest with cbcmac */
struct safexcel_alg_template safexcel_alg_xcbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_xcbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "xcbc(aes)",
				.cra_driver_name = "safexcel-xcbc-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2175 
2176 static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2177 				unsigned int len)
2178 {
2179 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2180 	struct crypto_aes_ctx aes;
2181 	__be64 consts[4];
2182 	u64 _const[2];
2183 	u8 msb_mask, gfmask;
2184 	int ret, i;
2185 
2186 	ret = aes_expandkey(&aes, key, len);
2187 	if (ret)
2188 		return ret;
2189 
2190 	for (i = 0; i < len / sizeof(u32); i++)
2191 		ctx->base.ipad.word[i + 8] = swab(aes.key_enc[i]);
2192 
2193 	/* precompute the CMAC key material */
2194 	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2195 	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2196 				CRYPTO_TFM_REQ_MASK);
2197 	ret = crypto_cipher_setkey(ctx->kaes, key, len);
2198 	if (ret)
2199 		return ret;
2200 
2201 	/* code below borrowed from crypto/cmac.c */
2202 	/* encrypt the zero block */
2203 	memset(consts, 0, AES_BLOCK_SIZE);
2204 	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2205 
2206 	gfmask = 0x87;
2207 	_const[0] = be64_to_cpu(consts[1]);
2208 	_const[1] = be64_to_cpu(consts[0]);
2209 
2210 	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
2211 	for (i = 0; i < 4; i += 2) {
2212 		msb_mask = ((s64)_const[1] >> 63) & gfmask;
2213 		_const[1] = (_const[1] << 1) | (_const[0] >> 63);
2214 		_const[0] = (_const[0] << 1) ^ msb_mask;
2215 
2216 		consts[i + 0] = cpu_to_be64(_const[1]);
2217 		consts[i + 1] = cpu_to_be64(_const[0]);
2218 	}
2219 	/* end of code borrowed from crypto/cmac.c */
2220 
2221 	for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2222 		ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);
2223 
2224 	if (len == AES_KEYSIZE_192) {
2225 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2226 		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2227 	} else if (len == AES_KEYSIZE_256) {
2228 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2229 		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2230 	} else {
2231 		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2232 		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2233 	}
2234 	ctx->cbcmac = false;
2235 
2236 	memzero_explicit(&aes, sizeof(aes));
2237 	return 0;
2238 }
2239 
/* cmac(aes) ahash template; shares init/digest with cbcmac */
struct safexcel_alg_template safexcel_alg_cmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cmac(aes)",
				.cra_driver_name = "safexcel-cmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2271 
/* Initialize a plain SM3 hash request */
static int safexcel_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	/* The engine starts from the standard SM3 precomputed IV */
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;

	return 0;
}
2287 
static int safexcel_sm3_digest(struct ahash_request *areq)
{
	/* One-shot hash: init followed by a full finup */
	return safexcel_sm3_init(areq) ?: safexcel_ahash_finup(areq);
}
2297 
/* sm3 ahash template; only registered when the engine supports SM3 */
struct safexcel_alg_template safexcel_alg_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sm3_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sm3",
				.cra_driver_name = "safexcel-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2328 
/* Precompute the HMAC-SM3 ipad/opad digests via the common helper */
static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
					SM3_DIGEST_SIZE);
}
2335 
/*
 * Initialize an HMAC-SM3 request, starting from the ipad digest that
 * was precomputed by safexcel_hmac_sm3_setkey().
 */
static int safexcel_hmac_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= SM3_BLOCK_SIZE;
	req->processed	= SM3_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}
2358 
static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
{
	/* One-shot HMAC: init from the precomputed ipad, then finup */
	return safexcel_hmac_sm3_init(areq) ?: safexcel_ahash_finup(areq);
}
2368 
/* hmac(sm3) ahash template; only registered when the engine supports SM3 */
struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_hmac_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sm3_digest,
		.setkey = safexcel_hmac_sm3_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sm3)",
				.cra_driver_name = "safexcel-hmac-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2400 
/* Initialize a sha3-224 request for the hardware (non-fallback) path */
static int safexcel_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_224_DIGEST_SIZE;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	/* A fresh init drops any software-fallback state */
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}
2418 
2419 static int safexcel_sha3_fbcheck(struct ahash_request *req)
2420 {
2421 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2422 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2423 	struct ahash_request *subreq = ahash_request_ctx(req);
2424 	int ret = 0;
2425 
2426 	if (ctx->do_fallback) {
2427 		ahash_request_set_tfm(subreq, ctx->fback);
2428 		ahash_request_set_callback(subreq, req->base.flags,
2429 					   req->base.complete, req->base.data);
2430 		ahash_request_set_crypt(subreq, req->src, req->result,
2431 					req->nbytes);
2432 		if (!ctx->fb_init_done) {
2433 			if (ctx->fb_do_setkey) {
2434 				/* Set fallback cipher HMAC key */
2435 				u8 key[SHA3_224_BLOCK_SIZE];
2436 
2437 				memcpy(key, &ctx->base.ipad,
2438 				       crypto_ahash_blocksize(ctx->fback) / 2);
2439 				memcpy(key +
2440 				       crypto_ahash_blocksize(ctx->fback) / 2,
2441 				       &ctx->base.opad,
2442 				       crypto_ahash_blocksize(ctx->fback) / 2);
2443 				ret = crypto_ahash_setkey(ctx->fback, key,
2444 					crypto_ahash_blocksize(ctx->fback));
2445 				memzero_explicit(key,
2446 					crypto_ahash_blocksize(ctx->fback));
2447 				ctx->fb_do_setkey = false;
2448 			}
2449 			ret = ret ?: crypto_ahash_init(subreq);
2450 			ctx->fb_init_done = true;
2451 		}
2452 	}
2453 	return ret;
2454 }
2455 
2456 static int safexcel_sha3_update(struct ahash_request *req)
2457 {
2458 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2459 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2460 	struct ahash_request *subreq = ahash_request_ctx(req);
2461 
2462 	ctx->do_fallback = true;
2463 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2464 }
2465 
2466 static int safexcel_sha3_final(struct ahash_request *req)
2467 {
2468 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2469 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2470 	struct ahash_request *subreq = ahash_request_ctx(req);
2471 
2472 	ctx->do_fallback = true;
2473 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2474 }
2475 
2476 static int safexcel_sha3_finup(struct ahash_request *req)
2477 {
2478 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2479 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2480 	struct ahash_request *subreq = ahash_request_ctx(req);
2481 
2482 	ctx->do_fallback |= !req->nbytes;
2483 	if (ctx->do_fallback)
2484 		/* Update or ex/import happened or len 0, cannot use the HW */
2485 		return safexcel_sha3_fbcheck(req) ?:
2486 		       crypto_ahash_finup(subreq);
2487 	else
2488 		return safexcel_ahash_finup(req);
2489 }
2490 
2491 static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2492 {
2493 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2494 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2495 	struct ahash_request *subreq = ahash_request_ctx(req);
2496 
2497 	ctx->do_fallback = true;
2498 	ctx->fb_init_done = false;
2499 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2500 }
2501 
2502 static int safexcel_sha3_224_digest(struct ahash_request *req)
2503 {
2504 	if (req->nbytes)
2505 		return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2506 
2507 	/* HW cannot do zero length hash, use fallback instead */
2508 	return safexcel_sha3_digest_fallback(req);
2509 }
2510 
2511 static int safexcel_sha3_export(struct ahash_request *req, void *out)
2512 {
2513 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2514 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2515 	struct ahash_request *subreq = ahash_request_ctx(req);
2516 
2517 	ctx->do_fallback = true;
2518 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2519 }
2520 
2521 static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2522 {
2523 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2524 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2525 	struct ahash_request *subreq = ahash_request_ctx(req);
2526 
2527 	ctx->do_fallback = true;
2528 	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2529 	// return safexcel_ahash_import(req, in);
2530 }
2531 
/*
 * Common tfm init for the sha3 family: allocate the software fallback
 * ahash and size state/request contexts so either path fits.
 */
static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Update statesize from fallback algorithm! */
	crypto_hash_alg_common(ahash)->statesize =
		crypto_ahash_statesize(ctx->fback);
	/* Request ctx must fit both our own state and a fallback request */
	crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
					    sizeof(struct ahash_request) +
					    crypto_ahash_reqsize(ctx->fback)));
	return 0;
}
2554 
/* Free the software fallback, then run the common hash teardown */
static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	safexcel_ahash_cra_exit(tfm);
}
2562 
/* sha3-224 ahash template; needs a software fallback (see cra_flags) */
struct safexcel_alg_template safexcel_alg_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_224_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "safexcel-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2593 
/* Initialize a sha3-256 request for the hardware (non-fallback) path */
static int safexcel_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_256_DIGEST_SIZE;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	/* A fresh init drops any software-fallback state */
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}
2611 
2612 static int safexcel_sha3_256_digest(struct ahash_request *req)
2613 {
2614 	if (req->nbytes)
2615 		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2616 
2617 	/* HW cannot do zero length hash, use fallback instead */
2618 	return safexcel_sha3_digest_fallback(req);
2619 }
2620 
/* sha3-256 ahash template; needs a software fallback (see cra_flags) */
struct safexcel_alg_template safexcel_alg_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_256_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "safexcel-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2651 
/* Initialize a sha3-384 request for the hardware (non-fallback) path */
static int safexcel_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_384_DIGEST_SIZE;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	/* A fresh init drops any software-fallback state */
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}
2669 
2670 static int safexcel_sha3_384_digest(struct ahash_request *req)
2671 {
2672 	if (req->nbytes)
2673 		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2674 
2675 	/* HW cannot do zero length hash, use fallback instead */
2676 	return safexcel_sha3_digest_fallback(req);
2677 }
2678 
/* sha3-384 ahash template; needs a software fallback (see cra_flags) */
struct safexcel_alg_template safexcel_alg_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_384_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "safexcel-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2709 
/* Initialize a sha3-512 request for the hardware (non-fallback) path */
static int safexcel_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_512_DIGEST_SIZE;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	/* A fresh init drops any software-fallback state */
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}
2727 
2728 static int safexcel_sha3_512_digest(struct ahash_request *req)
2729 {
2730 	if (req->nbytes)
2731 		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2732 
2733 	/* HW cannot do zero length hash, use fallback instead */
2734 	return safexcel_sha3_digest_fallback(req);
2735 }
2736 
/* sha3-512 ahash template; needs a software fallback (see cra_flags) */
struct safexcel_alg_template safexcel_alg_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_512_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "safexcel-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2767 
2768 static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2769 {
2770 	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2771 	int ret;
2772 
2773 	ret = safexcel_sha3_cra_init(tfm);
2774 	if (ret)
2775 		return ret;
2776 
2777 	/* Allocate precalc basic digest implementation */
2778 	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2779 	if (IS_ERR(ctx->shpre))
2780 		return PTR_ERR(ctx->shpre);
2781 
2782 	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2783 			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2784 	if (!ctx->shdesc) {
2785 		crypto_free_shash(ctx->shpre);
2786 		return -ENOMEM;
2787 	}
2788 	ctx->shdesc->tfm = ctx->shpre;
2789 	return 0;
2790 }
2791 
/* Free the fallback ahash, the key-predigest shash and its descriptor */
static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	crypto_free_shash(ctx->shpre);
	kfree(ctx->shdesc);
	safexcel_ahash_cra_exit(tfm);
}
2801 
/*
 * hmac(sha3-*) setkey: split the (possibly pre-digested) key across the
 * ipad and opad buffers, each holding half a SHA3 block, and zero-pad
 * the remainder. The actual ipad/opad XOR is done elsewhere by the
 * HMAC infrastructure; here only the raw key halves are staged.
 */
static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret = 0;

	if (keylen > crypto_ahash_blocksize(tfm)) {
		/*
		 * If the key is larger than the blocksize, then hash it
		 * first using our fallback cipher
		 */
		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
					  ctx->base.ipad.byte);
		keylen = crypto_shash_digestsize(ctx->shpre);

		/*
		 * If the digest is larger than half the blocksize, we need to
		 * move the rest to opad due to the way our HMAC infra works.
		 */
		if (keylen > crypto_ahash_blocksize(tfm) / 2)
			/* Buffers overlap, need to use memmove iso memcpy! */
			memmove(&ctx->base.opad,
				ctx->base.ipad.byte +
					crypto_ahash_blocksize(tfm) / 2,
				keylen - crypto_ahash_blocksize(tfm) / 2);
	} else {
		/*
		 * Copy the key to our ipad & opad buffers
		 * Note that ipad and opad each contain one half of the key,
		 * to match the existing HMAC driver infrastructure.
		 */
		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
			memcpy(&ctx->base.ipad, key, keylen);
		} else {
			memcpy(&ctx->base.ipad, key,
			       crypto_ahash_blocksize(tfm) / 2);
			memcpy(&ctx->base.opad,
			       key + crypto_ahash_blocksize(tfm) / 2,
			       keylen - crypto_ahash_blocksize(tfm) / 2);
		}
	}

	/* Pad key with zeroes */
	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
		memset(ctx->base.ipad.byte + keylen, 0,
		       crypto_ahash_blocksize(tfm) / 2 - keylen);
		memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
	} else {
		memset(ctx->base.opad.byte + keylen -
		       crypto_ahash_blocksize(tfm) / 2, 0,
		       crypto_ahash_blocksize(tfm) - keylen);
	}

	/* If doing fallback, still need to set the new key! */
	ctx->fb_do_setkey = true;
	return ret;
}
2859 
/* Initialize an hmac(sha3-224) request from the staged key halves */
static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len	= SHA3_224_BLOCK_SIZE;
	req->processed	= SHA3_224_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	req->hmac = true;
	/* A fresh init drops any software-fallback state */
	ctx->do_fallback = false;
	ctx->fb_init_done = false;
	return 0;
}
2883 
2884 static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2885 {
2886 	if (req->nbytes)
2887 		return safexcel_hmac_sha3_224_init(req) ?:
2888 		       safexcel_ahash_finup(req);
2889 
2890 	/* HW cannot do zero length HMAC, use fallback instead */
2891 	return safexcel_sha3_digest_fallback(req);
2892 }
2893 
/* hmac(sha3-224) tfm init; sha3-224 predigests oversized keys */
static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
}
2898 
/* hmac(sha3-224) ahash template; needs a software fallback */
struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_224_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-224)",
				.cra_driver_name = "safexcel-hmac-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_224_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
2930 
2931 static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2932 {
2933 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2934 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2935 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2936 
2937 	memset(req, 0, sizeof(*req));
2938 
2939 	/* Copy (half of) the key */
2940 	memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
2941 	/* Start of HMAC should have len == processed == blocksize */
2942 	req->len	= SHA3_256_BLOCK_SIZE;
2943 	req->processed	= SHA3_256_BLOCK_SIZE;
2944 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2945 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2946 	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2947 	req->digest_sz = SHA3_256_DIGEST_SIZE;
2948 	req->block_sz = SHA3_256_BLOCK_SIZE;
2949 	req->hmac = true;
2950 	ctx->do_fallback = false;
2951 	ctx->fb_init_done = false;
2952 	return 0;
2953 }
2954 
2955 static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2956 {
2957 	if (req->nbytes)
2958 		return safexcel_hmac_sha3_256_init(req) ?:
2959 		       safexcel_ahash_finup(req);
2960 
2961 	/* HW cannot do zero length HMAC, use fallback instead */
2962 	return safexcel_sha3_digest_fallback(req);
2963 }
2964 
static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
	/* Shared HMAC-SHA3 tfm init, using generic sha3-256 as fallback base */
	const char *fb_name = "sha3-256";

	return safexcel_hmac_sha3_cra_init(tfm, fb_name);
}
2969 
/* Algorithm template registering hmac(sha3-256) with the crypto API */
struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_256_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-256)",
				.cra_driver_name = "safexcel-hmac-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_256_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
3001 
3002 static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
3003 {
3004 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3005 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3006 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3007 
3008 	memset(req, 0, sizeof(*req));
3009 
3010 	/* Copy (half of) the key */
3011 	memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
3012 	/* Start of HMAC should have len == processed == blocksize */
3013 	req->len	= SHA3_384_BLOCK_SIZE;
3014 	req->processed	= SHA3_384_BLOCK_SIZE;
3015 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
3016 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3017 	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
3018 	req->digest_sz = SHA3_384_DIGEST_SIZE;
3019 	req->block_sz = SHA3_384_BLOCK_SIZE;
3020 	req->hmac = true;
3021 	ctx->do_fallback = false;
3022 	ctx->fb_init_done = false;
3023 	return 0;
3024 }
3025 
3026 static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
3027 {
3028 	if (req->nbytes)
3029 		return safexcel_hmac_sha3_384_init(req) ?:
3030 		       safexcel_ahash_finup(req);
3031 
3032 	/* HW cannot do zero length HMAC, use fallback instead */
3033 	return safexcel_sha3_digest_fallback(req);
3034 }
3035 
static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
	/* Shared HMAC-SHA3 tfm init, using generic sha3-384 as fallback base */
	const char *fb_name = "sha3-384";

	return safexcel_hmac_sha3_cra_init(tfm, fb_name);
}
3040 
/* Algorithm template registering hmac(sha3-384) with the crypto API */
struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_384_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-384)",
				.cra_driver_name = "safexcel-hmac-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_384_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
3072 
3073 static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
3074 {
3075 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3076 	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3077 	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3078 
3079 	memset(req, 0, sizeof(*req));
3080 
3081 	/* Copy (half of) the key */
3082 	memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
3083 	/* Start of HMAC should have len == processed == blocksize */
3084 	req->len	= SHA3_512_BLOCK_SIZE;
3085 	req->processed	= SHA3_512_BLOCK_SIZE;
3086 	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
3087 	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3088 	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
3089 	req->digest_sz = SHA3_512_DIGEST_SIZE;
3090 	req->block_sz = SHA3_512_BLOCK_SIZE;
3091 	req->hmac = true;
3092 	ctx->do_fallback = false;
3093 	ctx->fb_init_done = false;
3094 	return 0;
3095 }
3096 
3097 static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
3098 {
3099 	if (req->nbytes)
3100 		return safexcel_hmac_sha3_512_init(req) ?:
3101 		       safexcel_ahash_finup(req);
3102 
3103 	/* HW cannot do zero length HMAC, use fallback instead */
3104 	return safexcel_sha3_digest_fallback(req);
3105 }
3106 
static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
	/* Shared HMAC-SHA3 tfm init, using generic sha3-512 as fallback base */
	const char *fb_name = "sha3-512";

	return safexcel_hmac_sha3_cra_init(tfm, fb_name);
}

/* Algorithm template registering hmac(sha3-512) with the crypto API */
struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_512_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-512)",
				.cra_driver_name = "safexcel-hmac-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_512_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
3142