1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * This file is part of STM32 Crypto driver for Linux.
4  *
5  * Copyright (C) 2017, STMicroelectronics - All Rights Reserved
6  * Author(s): Lionel DEBIEVE <lionel.debieve@st.com> for STMicroelectronics.
7  */
8 
9 #include <linux/clk.h>
10 #include <linux/delay.h>
11 #include <linux/dma-mapping.h>
12 #include <linux/dmaengine.h>
13 #include <linux/interrupt.h>
14 #include <linux/io.h>
15 #include <linux/iopoll.h>
16 #include <linux/kernel.h>
17 #include <linux/module.h>
18 #include <linux/of_device.h>
19 #include <linux/platform_device.h>
20 #include <linux/pm_runtime.h>
21 #include <linux/reset.h>
22 
23 #include <crypto/engine.h>
24 #include <crypto/hash.h>
25 #include <crypto/md5.h>
26 #include <crypto/scatterwalk.h>
27 #include <crypto/sha1.h>
28 #include <crypto/sha2.h>
29 #include <crypto/sha3.h>
30 #include <crypto/internal/hash.h>
31 
32 #define HASH_CR				0x00
33 #define HASH_DIN			0x04
34 #define HASH_STR			0x08
35 #define HASH_UX500_HREG(x)		(0x0c + ((x) * 0x04))
36 #define HASH_IMR			0x20
37 #define HASH_SR				0x24
38 #define HASH_CSR(x)			(0x0F8 + ((x) * 0x04))
39 #define HASH_HREG(x)			(0x310 + ((x) * 0x04))
40 #define HASH_HWCFGR			0x3F0
41 #define HASH_VER			0x3F4
42 #define HASH_ID				0x3F8
43 
44 /* Control Register */
45 #define HASH_CR_INIT			BIT(2)
46 #define HASH_CR_DMAE			BIT(3)
47 #define HASH_CR_DATATYPE_POS		4
48 #define HASH_CR_MODE			BIT(6)
49 #define HASH_CR_ALGO_POS		7
50 #define HASH_CR_MDMAT			BIT(13)
51 #define HASH_CR_DMAA			BIT(14)
52 #define HASH_CR_LKEY			BIT(16)
53 
54 /* Interrupt */
55 #define HASH_DINIE			BIT(0)
56 #define HASH_DCIE			BIT(1)
57 
58 /* Interrupt Mask */
59 #define HASH_MASK_CALC_COMPLETION	BIT(0)
60 #define HASH_MASK_DATA_INPUT		BIT(1)
61 
62 /* Status Flags */
63 #define HASH_SR_DATA_INPUT_READY	BIT(0)
64 #define HASH_SR_OUTPUT_READY		BIT(1)
65 #define HASH_SR_DMA_ACTIVE		BIT(2)
66 #define HASH_SR_BUSY			BIT(3)
67 
68 /* STR Register */
69 #define HASH_STR_NBLW_MASK		GENMASK(4, 0)
70 #define HASH_STR_DCAL			BIT(8)
71 
72 /* HWCFGR Register */
73 #define HASH_HWCFG_DMA_MASK		GENMASK(3, 0)
74 
75 /* Context swap register */
76 #define HASH_CSR_NB_SHA256_HMAC		54
77 #define HASH_CSR_NB_SHA256		38
78 #define HASH_CSR_NB_SHA512_HMAC		103
79 #define HASH_CSR_NB_SHA512		91
80 #define HASH_CSR_NB_SHA3_HMAC		88
81 #define HASH_CSR_NB_SHA3		72
82 #define HASH_CSR_NB_MAX			HASH_CSR_NB_SHA512_HMAC
83 
84 #define HASH_FLAGS_INIT			BIT(0)
85 #define HASH_FLAGS_OUTPUT_READY		BIT(1)
86 #define HASH_FLAGS_CPU			BIT(2)
87 #define HASH_FLAGS_DMA_ACTIVE		BIT(3)
88 #define HASH_FLAGS_HMAC_INIT		BIT(4)
89 #define HASH_FLAGS_HMAC_FINAL		BIT(5)
90 #define HASH_FLAGS_HMAC_KEY		BIT(6)
91 #define HASH_FLAGS_SHA3_MODE		BIT(7)
92 #define HASH_FLAGS_FINAL		BIT(15)
93 #define HASH_FLAGS_FINUP		BIT(16)
94 #define HASH_FLAGS_ALGO_MASK		GENMASK(20, 17)
95 #define HASH_FLAGS_ALGO_SHIFT		17
96 #define HASH_FLAGS_ERRORS		BIT(21)
97 #define HASH_FLAGS_EMPTY		BIT(22)
98 #define HASH_FLAGS_HMAC			BIT(23)
99 
100 #define HASH_OP_UPDATE			1
101 #define HASH_OP_FINAL			2
102 
103 #define HASH_BURST_LEVEL		4
104 
/*
 * Values for the HASH_CR DATATYPE field: the width of the data units
 * written to HASH_DIN (full words, half-words, bytes or single bits).
 */
enum stm32_hash_data_format {
	HASH_DATA_32_BITS		= 0x0,
	HASH_DATA_16_BITS		= 0x1,
	HASH_DATA_8_BITS		= 0x2,
	HASH_DATA_1_BIT			= 0x3
};
111 
112 #define HASH_BUFLEN			(SHA3_224_BLOCK_SIZE + 4)
113 #define HASH_MAX_KEY_SIZE		(SHA512_BLOCK_SIZE * 8)
114 
/*
 * Hardware algorithm identifiers programmed into the HASH_CR ALGO field
 * on non-Ux500 IPs.  Note the encoding is sparse: SHA-384 is 12 and
 * SHA-512 is 15.
 */
enum stm32_hash_algo {
	HASH_SHA1			= 0,
	HASH_MD5			= 1,
	HASH_SHA224			= 2,
	HASH_SHA256			= 3,
	HASH_SHA3_224			= 4,
	HASH_SHA3_256			= 5,
	HASH_SHA3_384			= 6,
	HASH_SHA3_512			= 7,
	HASH_SHA384			= 12,
	HASH_SHA512			= 15,
};
127 
/* The Ux500 variant uses its own two-value algorithm encoding. */
enum ux500_hash_algo {
	HASH_SHA256_UX500		= 0,
	HASH_SHA1_UX500			= 1,
};
132 
133 #define HASH_AUTOSUSPEND_DELAY		50
134 
/*
 * struct stm32_hash_ctx - transform (tfm) level context.
 * @enginectx: crypto engine callbacks for this tfm
 * @hdev: hash device bound to this tfm (first probed device found)
 * @xtfm: software shash fallback, only allocated on Ux500
 * @flags: algorithm properties set at tfm init (HMAC, SHA-3 mode)
 * @key: copy of the HMAC key set via setkey()
 * @keylen: HMAC key length in bytes
 */
struct stm32_hash_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_hash_dev	*hdev;
	struct crypto_shash	*xtfm;
	unsigned long		flags;

	u8			key[HASH_MAX_KEY_SIZE];
	int			keylen;
};
144 
/*
 * struct stm32_hash_state - exportable per-request software state.
 * @flags: HASH_FLAGS_* describing algorithm, HMAC, CPU/DMA path, progress
 * @bufcnt: number of bytes currently staged in @buffer
 * @blocklen: bytes to accumulate before feeding the IP (block size + 1 word)
 * @buffer: staging buffer for partial blocks, word-aligned for FIFO writes
 * @hw_context: saved IMR/STR/CR registers plus the HASH_CSR bank
 */
struct stm32_hash_state {
	u32			flags;

	u16			bufcnt;
	u16			blocklen;

	u8 buffer[HASH_BUFLEN] __aligned(4);

	/* saved hardware context (IMR, STR, CR, then CSR bank) */
	u32			hw_context[3 + HASH_CSR_NB_MAX];
};
156 
/*
 * struct stm32_hash_request_ctx - per-request driver context.
 * @hdev: device processing this request
 * @op: HASH_OP_UPDATE or HASH_OP_FINAL
 * @digest: output digest, word-aligned for register reads
 * @digcnt: digest size in bytes
 * @sg: current scatterlist position of the request source data
 * @offset: byte offset into @sg already consumed
 * @total: bytes of request data left to process
 * @sg_key: single-entry scatterlist used to DMA the HMAC key
 * @dma_addr: mapped DMA address (when used)
 * @dma_ct: dma_map_sg() result count
 * @nents: number of entries in the source scatterlist
 * @data_type: HASH_CR DATATYPE value for this request
 * @state: exportable software state (see struct stm32_hash_state)
 */
struct stm32_hash_request_ctx {
	struct stm32_hash_dev	*hdev;
	unsigned long		op;

	u8 digest[SHA512_DIGEST_SIZE] __aligned(sizeof(u32));
	size_t			digcnt;

	/* DMA */
	struct scatterlist	*sg;
	unsigned int		offset;
	unsigned int		total;
	struct scatterlist	sg_key;

	dma_addr_t		dma_addr;
	size_t			dma_ct;
	int			nents;

	u8			data_type;

	struct stm32_hash_state state;
};
178 
/* One array of exposed ahash algorithms and its length. */
struct stm32_hash_algs_info {
	struct ahash_alg	*algs_list;
	size_t			size;
};
183 
/*
 * struct stm32_hash_pdata - per-compatible hardware description.
 * @alg_shift: bit position of the algorithm field in HASH_CR
 * @algs_info: algorithms supported by this IP revision
 * @algs_info_size: number of entries in @algs_info
 * @has_sr: a status register (HASH_SR) exists; Ux500 lacks it
 * @has_mdmat: the HASH_CR_MDMAT bit is implemented
 * @broken_emptymsg: hardware cannot hash a zero-length message
 * @ux500: this is the Ux500 variant (different algo encoding, fallback)
 */
struct stm32_hash_pdata {
	const int				alg_shift;
	const struct stm32_hash_algs_info	*algs_info;
	size_t					algs_info_size;
	bool					has_sr;
	bool					has_mdmat;
	bool					broken_emptymsg;
	bool					ux500;
};
193 
/*
 * struct stm32_hash_dev - one HASH peripheral instance.
 * @list: node in the driver-global device list
 * @dev: device handle
 * @clk: peripheral clock
 * @rst: optional reset control
 * @io_base: mapped register base
 * @phys_base: physical base, used to compute the DMA target address
 * @dma_mode: DMA mode reported by HASH_HWCFGR (1 selects the MDMA path)
 * @polled: no IRQ available, completion is polled
 * @req: request currently owning the hardware
 * @engine: crypto engine queueing requests for this device
 * @flags: HASH_FLAGS_* runtime state of the current hardware session
 * @dma_lch: DMA channel feeding HASH_DIN
 * @dma_completion: signalled from the DMA callback
 * @pdata: per-compatible hardware description
 */
struct stm32_hash_dev {
	struct list_head	list;
	struct device		*dev;
	struct clk		*clk;
	struct reset_control	*rst;
	void __iomem		*io_base;
	phys_addr_t		phys_base;
	u32			dma_mode;
	bool			polled;

	struct ahash_request	*req;
	struct crypto_engine	*engine;

	unsigned long		flags;

	struct dma_chan		*dma_lch;
	struct completion	dma_completion;

	const struct stm32_hash_pdata	*pdata;
};
214 
/* Driver-global bookkeeping: the list of probed HASH devices. */
struct stm32_hash_drv {
	struct list_head	dev_list;
	spinlock_t		lock; /* List protection access */
};
219 
/* Singleton instance shared by all probed devices. */
static struct stm32_hash_drv stm32_hash = {
	.dev_list = LIST_HEAD_INIT(stm32_hash.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(stm32_hash.lock),
};
224 
225 static void stm32_hash_dma_callback(void *param);
226 
/* Read a HASH register (relaxed: no ordering vs. DMA/memory implied). */
static inline u32 stm32_hash_read(struct stm32_hash_dev *hdev, u32 offset)
{
	return readl_relaxed(hdev->io_base + offset);
}
231 
/* Write a HASH register (relaxed: no ordering vs. DMA/memory implied). */
static inline void stm32_hash_write(struct stm32_hash_dev *hdev,
				    u32 offset, u32 value)
{
	writel_relaxed(value, hdev->io_base + offset);
}
237 
/*
 * stm32_hash_wait_busy() - poll until the hash core is ready again.
 *
 * Return: 0 once ready, -ETIMEDOUT after ~10 ms of polling.
 */
static inline int stm32_hash_wait_busy(struct stm32_hash_dev *hdev)
{
	u32 status;

	/* The Ux500 lacks the special status register, we poll the DCAL bit instead */
	if (!hdev->pdata->has_sr)
		return readl_relaxed_poll_timeout(hdev->io_base + HASH_STR, status,
						  !(status & HASH_STR_DCAL), 10, 10000);

	return readl_relaxed_poll_timeout(hdev->io_base + HASH_SR, status,
				   !(status & HASH_SR_BUSY), 10, 10000);
}
250 
251 static void stm32_hash_set_nblw(struct stm32_hash_dev *hdev, int length)
252 {
253 	u32 reg;
254 
255 	reg = stm32_hash_read(hdev, HASH_STR);
256 	reg &= ~(HASH_STR_NBLW_MASK);
257 	reg |= (8U * ((length) % 4U));
258 	stm32_hash_write(hdev, HASH_STR, reg);
259 }
260 
/*
 * stm32_hash_write_key() - push the HMAC key into the data FIFO.
 *
 * Programs NBLW for the trailing partial word, writes the key one 32-bit
 * word at a time (the over-read of a partial last word stays inside the
 * fixed-size ctx->key buffer) and sets DCAL so the core processes it.
 *
 * Return: -EINPROGRESS when a key was written (core is now busy),
 * 0 when keylen is zero and nothing was done.
 */
static int stm32_hash_write_key(struct stm32_hash_dev *hdev)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	u32 reg;
	int keylen = ctx->keylen;
	void *key = ctx->key;

	if (keylen) {
		stm32_hash_set_nblw(hdev, keylen);

		while (keylen > 0) {
			stm32_hash_write(hdev, HASH_DIN, *(u32 *)key);
			keylen -= 4;
			key += 4;
		}

		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);

		return -EINPROGRESS;
	}

	return 0;
}
287 
/*
 * stm32_hash_write_ctrl() - program HASH_CR and start a new computation.
 *
 * Only acts on the first transfer of a hardware session (while
 * HASH_FLAGS_INIT is clear): selects the algorithm (the bit layout
 * differs between IP variants), the data type and HMAC mode, optionally
 * unmasks the completion interrupt, then writes HASH_CR_INIT.
 */
static void stm32_hash_write_ctrl(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct stm32_hash_state *state = &rctx->state;
	u32 alg = (state->flags & HASH_FLAGS_ALGO_MASK) >> HASH_FLAGS_ALGO_SHIFT;

	u32 reg = HASH_CR_INIT;

	if (!(hdev->flags & HASH_FLAGS_INIT)) {
		if (hdev->pdata->ux500) {
			/* Ux500: a single algorithm bit at ALGO_POS */
			reg |= ((alg & BIT(0)) << HASH_CR_ALGO_POS);
		} else {
			/* Split encoding: algo bit 1 lands at CR bit 18, bit 0 at bit 7 */
			if (hdev->pdata->alg_shift == HASH_CR_ALGO_POS)
				reg |= ((alg & BIT(1)) << 17) |
				       ((alg & BIT(0)) << HASH_CR_ALGO_POS);
			else
				reg |= alg << hdev->pdata->alg_shift;
		}

		reg |= (rctx->data_type << HASH_CR_DATATYPE_POS);

		if (state->flags & HASH_FLAGS_HMAC) {
			hdev->flags |= HASH_FLAGS_HMAC;
			reg |= HASH_CR_MODE;
			/* LKEY: key is longer than one block */
			if (ctx->keylen > crypto_ahash_blocksize(tfm))
				reg |= HASH_CR_LKEY;
		}

		/* Only unmask the completion IRQ when we actually have one */
		if (!hdev->polled)
			stm32_hash_write(hdev, HASH_IMR, HASH_DCIE);

		stm32_hash_write(hdev, HASH_CR, reg);

		hdev->flags |= HASH_FLAGS_INIT;

		/*
		 * After first block + 1 words are fill up,
		 * we only need to fill 1 block to start partial computation
		 */
		rctx->state.blocklen -= sizeof(u32);

		dev_dbg(hdev->dev, "Write Control %x\n", reg);
	}
}
334 
/*
 * stm32_hash_append_sg() - stage request data into the block buffer.
 *
 * Copies bytes from rctx->sg (starting at rctx->offset) into
 * state->buffer until either blocklen bytes are buffered or the request
 * data is exhausted, advancing through the scatterlist as entries drain.
 */
static void stm32_hash_append_sg(struct stm32_hash_request_ctx *rctx)
{
	struct stm32_hash_state *state = &rctx->state;
	size_t count;

	while ((state->bufcnt < state->blocklen) && rctx->total) {
		count = min(rctx->sg->length - rctx->offset, rctx->total);
		count = min_t(size_t, count, state->blocklen - state->bufcnt);

		if (count <= 0) {
			/* Skip zero-length entries in the middle of the list */
			if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
				rctx->sg = sg_next(rctx->sg);
				continue;
			} else {
				break;
			}
		}

		scatterwalk_map_and_copy(state->buffer + state->bufcnt,
					 rctx->sg, rctx->offset, count, 0);

		state->bufcnt += count;
		rctx->offset += count;
		rctx->total -= count;

		if (rctx->offset == rctx->sg->length) {
			rctx->sg = sg_next(rctx->sg);
			if (rctx->sg)
				rctx->offset = 0;
			else
				rctx->total = 0;
		}
	}
}
369 
/*
 * stm32_hash_xmit_cpu() - feed @length bytes from @buf through the CPU.
 * @hdev:   hash device
 * @buf:    word-aligned staging buffer
 * @length: number of bytes to process
 * @final:  non-zero to trigger digest calculation after this chunk
 *
 * Writes full 32-bit words to HASH_DIN (rounding the count up; the NBLW
 * field programmed via stm32_hash_set_nblw() records how much of the last
 * word is valid).  For HMAC, the key is written once before the first
 * data chunk and again after the final one.
 *
 * Return: 0 when more data may follow, -EINPROGRESS once the final
 * computation was kicked off, -ETIMEDOUT if the core stayed busy.
 */
static int stm32_hash_xmit_cpu(struct stm32_hash_dev *hdev,
			       const u8 *buf, size_t length, int final)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct stm32_hash_state *state = &rctx->state;
	unsigned int count, len32;
	const u32 *buffer = (const u32 *)buf;
	u32 reg;

	if (final) {
		hdev->flags |= HASH_FLAGS_FINAL;

		/* Do not process empty messages if hw is buggy. */
		if (!(hdev->flags & HASH_FLAGS_INIT) && !length &&
		    hdev->pdata->broken_emptymsg) {
			state->flags |= HASH_FLAGS_EMPTY;
			return 0;
		}
	}

	len32 = DIV_ROUND_UP(length, sizeof(u32));

	dev_dbg(hdev->dev, "%s: length: %zd, final: %x len32 %i\n",
		__func__, length, final, len32);

	hdev->flags |= HASH_FLAGS_CPU;

	stm32_hash_write_ctrl(hdev);

	if (stm32_hash_wait_busy(hdev))
		return -ETIMEDOUT;

	/* First chunk of an HMAC session: send the key before the data */
	if ((hdev->flags & HASH_FLAGS_HMAC) &&
	    (!(hdev->flags & HASH_FLAGS_HMAC_KEY))) {
		hdev->flags |= HASH_FLAGS_HMAC_KEY;
		stm32_hash_write_key(hdev);
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
	}

	for (count = 0; count < len32; count++)
		stm32_hash_write(hdev, HASH_DIN, buffer[count]);

	if (final) {
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;

		stm32_hash_set_nblw(hdev, length);
		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);
		/* HMAC needs the key a second time after the message */
		if (hdev->flags & HASH_FLAGS_HMAC) {
			if (stm32_hash_wait_busy(hdev))
				return -ETIMEDOUT;
			stm32_hash_write_key(hdev);
		}
		return -EINPROGRESS;
	}

	return 0;
}
431 
432 static int hash_swap_reg(struct stm32_hash_request_ctx *rctx)
433 {
434 	struct stm32_hash_state *state = &rctx->state;
435 
436 	switch ((state->flags & HASH_FLAGS_ALGO_MASK) >>
437 		HASH_FLAGS_ALGO_SHIFT) {
438 	case HASH_MD5:
439 	case HASH_SHA1:
440 	case HASH_SHA224:
441 	case HASH_SHA256:
442 		if (state->flags & HASH_FLAGS_HMAC)
443 			return HASH_CSR_NB_SHA256_HMAC;
444 		else
445 			return HASH_CSR_NB_SHA256;
446 		break;
447 
448 	case HASH_SHA384:
449 	case HASH_SHA512:
450 		if (state->flags & HASH_FLAGS_HMAC)
451 			return HASH_CSR_NB_SHA512_HMAC;
452 		else
453 			return HASH_CSR_NB_SHA512;
454 		break;
455 
456 	case HASH_SHA3_224:
457 	case HASH_SHA3_256:
458 	case HASH_SHA3_384:
459 	case HASH_SHA3_512:
460 		if (state->flags & HASH_FLAGS_HMAC)
461 			return HASH_CSR_NB_SHA3_HMAC;
462 		else
463 			return HASH_CSR_NB_SHA3;
464 		break;
465 
466 	default:
467 		return -EINVAL;
468 	}
469 }
470 
/*
 * stm32_hash_update_cpu() - CPU-path update (and final) processing.
 *
 * Feeds full blocks to the IP, buffers any remainder, and — when the
 * request is not final — saves the complete hardware context (IMR, STR,
 * CR and the CSR bank) so another request may use the core in between.
 *
 * Return: 0, -EINPROGRESS once a final computation was started, or a
 * negative error (-ETIMEDOUT when the core stayed busy).
 */
static int stm32_hash_update_cpu(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct stm32_hash_state *state = &rctx->state;
	u32 *preg = state->hw_context;
	int bufcnt, err = 0, final;
	int i, swap_reg;

	dev_dbg(hdev->dev, "%s flags %x\n", __func__, state->flags);

	final = state->flags & HASH_FLAGS_FINAL;

	/* Push every complete block; the tail stays in the buffer */
	while ((rctx->total >= state->blocklen) ||
	       (state->bufcnt + rctx->total >= state->blocklen)) {
		stm32_hash_append_sg(rctx);
		bufcnt = state->bufcnt;
		state->bufcnt = 0;
		err = stm32_hash_xmit_cpu(hdev, state->buffer, bufcnt, 0);
		if (err)
			return err;
	}

	stm32_hash_append_sg(rctx);

	if (final) {
		bufcnt = state->bufcnt;
		state->bufcnt = 0;
		return stm32_hash_xmit_cpu(hdev, state->buffer, bufcnt, 1);
	}

	/* Nothing was fed to the core yet: no context to save */
	if (!(hdev->flags & HASH_FLAGS_INIT))
		return 0;

	if (stm32_hash_wait_busy(hdev))
		return -ETIMEDOUT;

	swap_reg = hash_swap_reg(rctx);

	/* Save the full hardware context for a later resume */
	if (!hdev->pdata->ux500)
		*preg++ = stm32_hash_read(hdev, HASH_IMR);
	*preg++ = stm32_hash_read(hdev, HASH_STR);
	*preg++ = stm32_hash_read(hdev, HASH_CR);
	for (i = 0; i < swap_reg; i++)
		*preg++ = stm32_hash_read(hdev, HASH_CSR(i));

	/* Mark the saved context valid for stm32_hash_one_request() */
	state->flags |= HASH_FLAGS_INIT;

	return err;
}
520 
/*
 * stm32_hash_xmit_dma() - run one DMA transfer of @length bytes into DIN.
 * @sg: single-entry scatterlist to stream into the FIFO
 * @length: byte length used to program NBLW
 * @mdma: set HASH_CR_MDMAT (multiple DMA transfers) when supported
 *
 * Submits the descriptor, enables HASH_CR_DMAE and blocks up to 100 ms
 * for the transfer to complete.
 *
 * Return: -EINPROGRESS on success (the core then computes), -ENOMEM on
 * descriptor/submit failure, -ETIMEDOUT when the DMA never completed.
 */
static int stm32_hash_xmit_dma(struct stm32_hash_dev *hdev,
			       struct scatterlist *sg, int length, int mdma)
{
	struct dma_async_tx_descriptor *in_desc;
	dma_cookie_t cookie;
	u32 reg;
	int err;

	in_desc = dmaengine_prep_slave_sg(hdev->dma_lch, sg, 1,
					  DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT |
					  DMA_CTRL_ACK);
	if (!in_desc) {
		dev_err(hdev->dev, "dmaengine_prep_slave error\n");
		return -ENOMEM;
	}

	reinit_completion(&hdev->dma_completion);
	in_desc->callback = stm32_hash_dma_callback;
	in_desc->callback_param = hdev;

	hdev->flags |= HASH_FLAGS_FINAL;
	hdev->flags |= HASH_FLAGS_DMA_ACTIVE;

	reg = stm32_hash_read(hdev, HASH_CR);

	if (hdev->pdata->has_mdmat) {
		if (mdma)
			reg |= HASH_CR_MDMAT;
		else
			reg &= ~HASH_CR_MDMAT;
	}
	reg |= HASH_CR_DMAE;

	stm32_hash_write(hdev, HASH_CR, reg);

	stm32_hash_set_nblw(hdev, length);

	cookie = dmaengine_submit(in_desc);
	err = dma_submit_error(cookie);
	if (err)
		return -ENOMEM;

	dma_async_issue_pending(hdev->dma_lch);

	if (!wait_for_completion_timeout(&hdev->dma_completion,
					 msecs_to_jiffies(100)))
		err = -ETIMEDOUT;

	/* The callback may fire without the transfer really being done */
	if (dma_async_is_tx_complete(hdev->dma_lch, cookie,
				     NULL, NULL) != DMA_COMPLETE)
		err = -ETIMEDOUT;

	if (err) {
		dev_err(hdev->dev, "DMA Error %i\n", err);
		dmaengine_terminate_all(hdev->dma_lch);
		return err;
	}

	return -EINPROGRESS;
}
581 
/* DMA completion callback: wakes the waiter in stm32_hash_xmit_dma(). */
static void stm32_hash_dma_callback(void *param)
{
	struct stm32_hash_dev *hdev = param;

	complete(&hdev->dma_completion);
}
588 
/*
 * stm32_hash_hmac_dma_send() - feed the HMAC key to the core (DMA path).
 *
 * Keys shorter than one block — and all keys when dma_mode == 1 — are
 * written by the CPU; otherwise the key is streamed through DMA like
 * regular message data.
 *
 * Return: -EINPROGRESS when the core is processing the key, -ETIMEDOUT
 * or -ENOMEM on error, 0 when there was no key to write.
 */
static int stm32_hash_hmac_dma_send(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	int err;

	if (ctx->keylen < rctx->state.blocklen || hdev->dma_mode == 1) {
		err = stm32_hash_write_key(hdev);
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
	} else {
		/* First key pass: wrap the key buffer, padded to a word */
		if (!(hdev->flags & HASH_FLAGS_HMAC_KEY))
			sg_init_one(&rctx->sg_key, ctx->key,
				    ALIGN(ctx->keylen, sizeof(u32)));

		rctx->dma_ct = dma_map_sg(hdev->dev, &rctx->sg_key, 1,
					  DMA_TO_DEVICE);
		if (rctx->dma_ct == 0) {
			dev_err(hdev->dev, "dma_map_sg error\n");
			return -ENOMEM;
		}

		err = stm32_hash_xmit_dma(hdev, &rctx->sg_key, ctx->keylen, 0);

		dma_unmap_sg(hdev->dev, &rctx->sg_key, 1, DMA_TO_DEVICE);
	}

	return err;
}
619 
620 static int stm32_hash_dma_init(struct stm32_hash_dev *hdev)
621 {
622 	struct dma_slave_config dma_conf;
623 	struct dma_chan *chan;
624 	int err;
625 
626 	memset(&dma_conf, 0, sizeof(dma_conf));
627 
628 	dma_conf.direction = DMA_MEM_TO_DEV;
629 	dma_conf.dst_addr = hdev->phys_base + HASH_DIN;
630 	dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
631 	dma_conf.src_maxburst = HASH_BURST_LEVEL;
632 	dma_conf.dst_maxburst = HASH_BURST_LEVEL;
633 	dma_conf.device_fc = false;
634 
635 	chan = dma_request_chan(hdev->dev, "in");
636 	if (IS_ERR(chan))
637 		return PTR_ERR(chan);
638 
639 	hdev->dma_lch = chan;
640 
641 	err = dmaengine_slave_config(hdev->dma_lch, &dma_conf);
642 	if (err) {
643 		dma_release_channel(hdev->dma_lch);
644 		hdev->dma_lch = NULL;
645 		dev_err(hdev->dev, "Couldn't configure DMA slave.\n");
646 		return err;
647 	}
648 
649 	init_completion(&hdev->dma_completion);
650 
651 	return 0;
652 }
653 
654 static int stm32_hash_dma_send(struct stm32_hash_dev *hdev)
655 {
656 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
657 	u32 *buffer = (void *)rctx->state.buffer;
658 	struct scatterlist sg[1], *tsg;
659 	int err = 0, reg, ncp = 0;
660 	unsigned int i, len = 0, bufcnt = 0;
661 	bool is_last = false;
662 
663 	rctx->sg = hdev->req->src;
664 	rctx->total = hdev->req->nbytes;
665 
666 	rctx->nents = sg_nents(rctx->sg);
667 	if (rctx->nents < 0)
668 		return -EINVAL;
669 
670 	stm32_hash_write_ctrl(hdev);
671 
672 	if (hdev->flags & HASH_FLAGS_HMAC) {
673 		err = stm32_hash_hmac_dma_send(hdev);
674 		if (err != -EINPROGRESS)
675 			return err;
676 	}
677 
678 	for_each_sg(rctx->sg, tsg, rctx->nents, i) {
679 		sg[0] = *tsg;
680 		len = sg->length;
681 
682 		if (sg_is_last(sg) || (bufcnt + sg[0].length) >= rctx->total) {
683 			sg->length = rctx->total - bufcnt;
684 			is_last = true;
685 			if (hdev->dma_mode == 1) {
686 				len = (ALIGN(sg->length, 16) - 16);
687 
688 				ncp = sg_pcopy_to_buffer(
689 					rctx->sg, rctx->nents,
690 					rctx->state.buffer, sg->length - len,
691 					rctx->total - sg->length + len);
692 
693 				sg->length = len;
694 			} else {
695 				if (!(IS_ALIGNED(sg->length, sizeof(u32)))) {
696 					len = sg->length;
697 					sg->length = ALIGN(sg->length,
698 							   sizeof(u32));
699 				}
700 			}
701 		}
702 
703 		rctx->dma_ct = dma_map_sg(hdev->dev, sg, 1,
704 					  DMA_TO_DEVICE);
705 		if (rctx->dma_ct == 0) {
706 			dev_err(hdev->dev, "dma_map_sg error\n");
707 			return -ENOMEM;
708 		}
709 
710 		err = stm32_hash_xmit_dma(hdev, sg, len, !is_last);
711 
712 		bufcnt += sg[0].length;
713 		dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
714 
715 		if (err == -ENOMEM)
716 			return err;
717 		if (is_last)
718 			break;
719 	}
720 
721 	if (hdev->dma_mode == 1) {
722 		if (stm32_hash_wait_busy(hdev))
723 			return -ETIMEDOUT;
724 		reg = stm32_hash_read(hdev, HASH_CR);
725 		reg &= ~HASH_CR_DMAE;
726 		reg |= HASH_CR_DMAA;
727 		stm32_hash_write(hdev, HASH_CR, reg);
728 
729 		if (ncp) {
730 			memset(buffer + ncp, 0,
731 			       DIV_ROUND_UP(ncp, sizeof(u32)) - ncp);
732 			writesl(hdev->io_base + HASH_DIN, buffer,
733 				DIV_ROUND_UP(ncp, sizeof(u32)));
734 		}
735 		stm32_hash_set_nblw(hdev, ncp);
736 		reg = stm32_hash_read(hdev, HASH_STR);
737 		reg |= HASH_STR_DCAL;
738 		stm32_hash_write(hdev, HASH_STR, reg);
739 		err = -EINPROGRESS;
740 	}
741 
742 	if (hdev->flags & HASH_FLAGS_HMAC) {
743 		if (stm32_hash_wait_busy(hdev))
744 			return -ETIMEDOUT;
745 		err = stm32_hash_hmac_dma_send(hdev);
746 	}
747 
748 	return err;
749 }
750 
751 static struct stm32_hash_dev *stm32_hash_find_dev(struct stm32_hash_ctx *ctx)
752 {
753 	struct stm32_hash_dev *hdev = NULL, *tmp;
754 
755 	spin_lock_bh(&stm32_hash.lock);
756 	if (!ctx->hdev) {
757 		list_for_each_entry(tmp, &stm32_hash.dev_list, list) {
758 			hdev = tmp;
759 			break;
760 		}
761 		ctx->hdev = hdev;
762 	} else {
763 		hdev = ctx->hdev;
764 	}
765 
766 	spin_unlock_bh(&stm32_hash.lock);
767 
768 	return hdev;
769 }
770 
771 static bool stm32_hash_dma_aligned_data(struct ahash_request *req)
772 {
773 	struct scatterlist *sg;
774 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
775 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
776 	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
777 	int i;
778 
779 	if (!hdev->dma_lch || req->nbytes <= rctx->state.blocklen)
780 		return false;
781 
782 	if (sg_nents(req->src) > 1) {
783 		if (hdev->dma_mode == 1)
784 			return false;
785 		for_each_sg(req->src, sg, sg_nents(req->src), i) {
786 			if ((!IS_ALIGNED(sg->length, sizeof(u32))) &&
787 			    (!sg_is_last(sg)))
788 				return false;
789 		}
790 	}
791 
792 	if (req->src->offset % 4)
793 		return false;
794 
795 	return true;
796 }
797 
/*
 * stm32_hash_init() - prepare the request context for a new hash.
 *
 * Derives the hardware algorithm identifier from the digest size (the
 * encoding depends on SHA-3 mode and the Ux500 variant), resets the
 * staging buffer and records the block length — one extra word on top of
 * the algorithm block size, consumed by the IP before partial
 * computations start (see stm32_hash_write_ctrl()).
 *
 * Return: 0 on success, -EINVAL for an unsupported digest or an
 * oversized block.
 */
static int stm32_hash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	struct stm32_hash_state *state = &rctx->state;
	bool sha3_mode = ctx->flags & HASH_FLAGS_SHA3_MODE;

	rctx->hdev = hdev;

	/* Start on the CPU path; finup may switch to DMA */
	state->flags = HASH_FLAGS_CPU;

	if (sha3_mode)
		state->flags |= HASH_FLAGS_SHA3_MODE;

	rctx->digcnt = crypto_ahash_digestsize(tfm);
	switch (rctx->digcnt) {
	case MD5_DIGEST_SIZE:
		state->flags |= HASH_MD5 << HASH_FLAGS_ALGO_SHIFT;
		break;
	case SHA1_DIGEST_SIZE:
		if (hdev->pdata->ux500)
			state->flags |= HASH_SHA1_UX500 << HASH_FLAGS_ALGO_SHIFT;
		else
			state->flags |= HASH_SHA1 << HASH_FLAGS_ALGO_SHIFT;
		break;
	case SHA224_DIGEST_SIZE:
		if (sha3_mode)
			state->flags |= HASH_SHA3_224 << HASH_FLAGS_ALGO_SHIFT;
		else
			state->flags |= HASH_SHA224 << HASH_FLAGS_ALGO_SHIFT;
		break;
	case SHA256_DIGEST_SIZE:
		if (sha3_mode) {
			state->flags |= HASH_SHA3_256 << HASH_FLAGS_ALGO_SHIFT;
		} else {
			if (hdev->pdata->ux500)
				state->flags |= HASH_SHA256_UX500 << HASH_FLAGS_ALGO_SHIFT;
			else
				state->flags |= HASH_SHA256 << HASH_FLAGS_ALGO_SHIFT;
		}
		break;
	case SHA384_DIGEST_SIZE:
		if (sha3_mode)
			state->flags |= HASH_SHA3_384 << HASH_FLAGS_ALGO_SHIFT;
		else
			state->flags |= HASH_SHA384 << HASH_FLAGS_ALGO_SHIFT;
		break;
	case SHA512_DIGEST_SIZE:
		if (sha3_mode)
			state->flags |= HASH_SHA3_512 << HASH_FLAGS_ALGO_SHIFT;
		else
			state->flags |= HASH_SHA512 << HASH_FLAGS_ALGO_SHIFT;
		break;
	default:
		return -EINVAL;
	}

	rctx->state.bufcnt = 0;
	rctx->state.blocklen = crypto_ahash_blocksize(tfm) + sizeof(u32);
	if (rctx->state.blocklen > HASH_BUFLEN) {
		dev_err(hdev->dev, "Error, block too large");
		return -EINVAL;
	}
	rctx->total = 0;
	rctx->offset = 0;
	rctx->data_type = HASH_DATA_8_BITS;

	if (ctx->flags & HASH_FLAGS_HMAC)
		state->flags |= HASH_FLAGS_HMAC;

	dev_dbg(hdev->dev, "%s Flags %x\n", __func__, state->flags);

	return 0;
}
874 
875 static int stm32_hash_update_req(struct stm32_hash_dev *hdev)
876 {
877 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
878 	struct stm32_hash_state *state = &rctx->state;
879 
880 	if (!(state->flags & HASH_FLAGS_CPU))
881 		return stm32_hash_dma_send(hdev);
882 
883 	return stm32_hash_update_cpu(hdev);
884 }
885 
886 static int stm32_hash_final_req(struct stm32_hash_dev *hdev)
887 {
888 	struct ahash_request *req = hdev->req;
889 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
890 	struct stm32_hash_state *state = &rctx->state;
891 	int buflen = state->bufcnt;
892 
893 	if (state->flags & HASH_FLAGS_FINUP)
894 		return stm32_hash_update_req(hdev);
895 
896 	state->bufcnt = 0;
897 
898 	return stm32_hash_xmit_cpu(hdev, state->buffer, buflen, 1);
899 }
900 
901 static void stm32_hash_emptymsg_fallback(struct ahash_request *req)
902 {
903 	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
904 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(ahash);
905 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
906 	struct stm32_hash_dev *hdev = rctx->hdev;
907 	int ret;
908 
909 	dev_dbg(hdev->dev, "use fallback message size 0 key size %d\n",
910 		ctx->keylen);
911 
912 	if (!ctx->xtfm) {
913 		dev_err(hdev->dev, "no fallback engine\n");
914 		return;
915 	}
916 
917 	if (ctx->keylen) {
918 		ret = crypto_shash_setkey(ctx->xtfm, ctx->key, ctx->keylen);
919 		if (ret) {
920 			dev_err(hdev->dev, "failed to set key ret=%d\n", ret);
921 			return;
922 		}
923 	}
924 
925 	ret = crypto_shash_tfm_digest(ctx->xtfm, NULL, 0, rctx->digest);
926 	if (ret)
927 		dev_err(hdev->dev, "shash digest error\n");
928 }
929 
930 static void stm32_hash_copy_hash(struct ahash_request *req)
931 {
932 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
933 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
934 	struct stm32_hash_state *state = &rctx->state;
935 	struct stm32_hash_dev *hdev = rctx->hdev;
936 	__be32 *hash = (void *)rctx->digest;
937 	unsigned int i, hashsize;
938 
939 	if (hdev->pdata->broken_emptymsg && (state->flags & HASH_FLAGS_EMPTY))
940 		return stm32_hash_emptymsg_fallback(req);
941 
942 	hashsize = crypto_ahash_digestsize(tfm);
943 
944 	for (i = 0; i < hashsize / sizeof(u32); i++) {
945 		if (hdev->pdata->ux500)
946 			hash[i] = cpu_to_be32(stm32_hash_read(hdev,
947 					      HASH_UX500_HREG(i)));
948 		else
949 			hash[i] = cpu_to_be32(stm32_hash_read(hdev,
950 					      HASH_HREG(i)));
951 	}
952 }
953 
954 static int stm32_hash_finish(struct ahash_request *req)
955 {
956 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
957 	u32 reg;
958 
959 	reg = stm32_hash_read(rctx->hdev, HASH_SR);
960 	reg &= ~HASH_SR_OUTPUT_READY;
961 	stm32_hash_write(rctx->hdev, HASH_SR, reg);
962 
963 	if (!req->result)
964 		return -EINVAL;
965 
966 	memcpy(req->result, rctx->digest, rctx->digcnt);
967 
968 	return 0;
969 }
970 
971 static void stm32_hash_finish_req(struct ahash_request *req, int err)
972 {
973 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
974 	struct stm32_hash_dev *hdev = rctx->hdev;
975 
976 	if (!err && (HASH_FLAGS_FINAL & hdev->flags)) {
977 		stm32_hash_copy_hash(req);
978 		err = stm32_hash_finish(req);
979 	}
980 
981 	pm_runtime_mark_last_busy(hdev->dev);
982 	pm_runtime_put_autosuspend(hdev->dev);
983 
984 	crypto_finalize_hash_request(hdev->engine, req, err);
985 }
986 
/*
 * Queue the request on the crypto engine; the engine later invokes
 * stm32_hash_one_request() to process it.
 */
static int stm32_hash_handle_queue(struct stm32_hash_dev *hdev,
				   struct ahash_request *req)
{
	return crypto_transfer_hash_request_to_engine(hdev->engine, req);
}
992 
/*
 * stm32_hash_one_request() - crypto engine worker for one queued request.
 *
 * Restores a previously saved hardware context when the request resumes
 * (HASH_FLAGS_INIT set in the software state), runs the update or final
 * step, and in polling mode busy-waits for completion.  Requests that
 * completed synchronously are finalized here; ones still in progress
 * (-EINPROGRESS) are completed later outside this function.
 */
static int stm32_hash_one_request(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = container_of(areq, struct ahash_request,
						 base);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	struct stm32_hash_state *state = &rctx->state;
	int swap_reg;
	int err = 0;

	if (!hdev)
		return -ENODEV;

	dev_dbg(hdev->dev, "processing new req, op: %lu, nbytes %d\n",
		rctx->op, req->nbytes);

	pm_runtime_get_sync(hdev->dev);

	hdev->req = req;
	hdev->flags = 0;
	swap_reg = hash_swap_reg(rctx);

	if (state->flags & HASH_FLAGS_INIT) {
		/* Resumed request: reload the saved hardware context */
		u32 *preg = rctx->state.hw_context;
		u32 reg;
		int i;

		if (!hdev->pdata->ux500)
			stm32_hash_write(hdev, HASH_IMR, *preg++);
		stm32_hash_write(hdev, HASH_STR, *preg++);
		/* Write the saved CR, then re-write it with INIT set */
		stm32_hash_write(hdev, HASH_CR, *preg);
		reg = *preg++ | HASH_CR_INIT;
		stm32_hash_write(hdev, HASH_CR, reg);

		for (i = 0; i < swap_reg; i++)
			stm32_hash_write(hdev, HASH_CSR(i), *preg++);

		hdev->flags |= HASH_FLAGS_INIT;

		/* The HMAC key was already consumed in the first session */
		if (state->flags & HASH_FLAGS_HMAC)
			hdev->flags |= HASH_FLAGS_HMAC |
				       HASH_FLAGS_HMAC_KEY;
	}

	if (rctx->op == HASH_OP_UPDATE)
		err = stm32_hash_update_req(hdev);
	else if (rctx->op == HASH_OP_FINAL)
		err = stm32_hash_final_req(hdev);

	/* If we have an IRQ, wait for that, else poll for completion */
	if (err == -EINPROGRESS && hdev->polled) {
		if (stm32_hash_wait_busy(hdev))
			err = -ETIMEDOUT;
		else {
			hdev->flags |= HASH_FLAGS_OUTPUT_READY;
			err = 0;
		}
	}

	if (err != -EINPROGRESS)
		/* done task will not finish it, so do it here */
		stm32_hash_finish_req(req, err);

	return 0;
}
1059 
/* Record the requested operation and hand the request to the engine. */
static int stm32_hash_enqueue(struct ahash_request *req, unsigned int op)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct stm32_hash_dev *hdev = ctx->hdev;

	rctx->op = op;

	return stm32_hash_handle_queue(hdev, req);
}
1070 
1071 static int stm32_hash_update(struct ahash_request *req)
1072 {
1073 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1074 	struct stm32_hash_state *state = &rctx->state;
1075 
1076 	if (!req->nbytes || !(state->flags & HASH_FLAGS_CPU))
1077 		return 0;
1078 
1079 	rctx->total = req->nbytes;
1080 	rctx->sg = req->src;
1081 	rctx->offset = 0;
1082 
1083 	if ((state->bufcnt + rctx->total < state->blocklen)) {
1084 		stm32_hash_append_sg(rctx);
1085 		return 0;
1086 	}
1087 
1088 	return stm32_hash_enqueue(req, HASH_OP_UPDATE);
1089 }
1090 
/*
 * stm32_hash_final() - .final entry point.
 *
 * Marks the request final and queues it; buffered leftovers are flushed
 * by stm32_hash_final_req() on the engine.
 */
static int stm32_hash_final(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_state *state = &rctx->state;

	state->flags |= HASH_FLAGS_FINAL;

	return stm32_hash_enqueue(req, HASH_OP_FINAL);
}
1100 
1101 static int stm32_hash_finup(struct ahash_request *req)
1102 {
1103 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1104 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
1105 	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
1106 	struct stm32_hash_state *state = &rctx->state;
1107 
1108 	if (!req->nbytes)
1109 		goto out;
1110 
1111 	state->flags |= HASH_FLAGS_FINUP;
1112 	rctx->total = req->nbytes;
1113 	rctx->sg = req->src;
1114 	rctx->offset = 0;
1115 
1116 	if (hdev->dma_lch && stm32_hash_dma_aligned_data(req))
1117 		state->flags &= ~HASH_FLAGS_CPU;
1118 
1119 out:
1120 	return stm32_hash_final(req);
1121 }
1122 
/* One-shot digest: initialize, then hash and finalize the request. */
static int stm32_hash_digest(struct ahash_request *req)
{
	int ret = stm32_hash_init(req);

	if (ret)
		return ret;

	return stm32_hash_finup(req);
}
1127 
/*
 * stm32_hash_export() - export the request state for later resumption.
 *
 * The exported state embeds the buffered partial block and the saved
 * hardware context, so a matching import can continue the computation.
 */
static int stm32_hash_export(struct ahash_request *req, void *out)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);

	memcpy(out, &rctx->state, sizeof(rctx->state));

	return 0;
}
1136 
1137 static int stm32_hash_import(struct ahash_request *req, const void *in)
1138 {
1139 	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
1140 
1141 	stm32_hash_init(req);
1142 	memcpy(&rctx->state, in, sizeof(rctx->state));
1143 
1144 	return 0;
1145 }
1146 
1147 static int stm32_hash_setkey(struct crypto_ahash *tfm,
1148 			     const u8 *key, unsigned int keylen)
1149 {
1150 	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
1151 
1152 	if (keylen <= HASH_MAX_KEY_SIZE) {
1153 		memcpy(ctx->key, key, keylen);
1154 		ctx->keylen = keylen;
1155 	} else {
1156 		return -ENOMEM;
1157 	}
1158 
1159 	return 0;
1160 }
1161 
1162 static int stm32_hash_init_fallback(struct crypto_tfm *tfm)
1163 {
1164 	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);
1165 	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
1166 	const char *name = crypto_tfm_alg_name(tfm);
1167 	struct crypto_shash *xtfm;
1168 
1169 	/* The fallback is only needed on Ux500 */
1170 	if (!hdev->pdata->ux500)
1171 		return 0;
1172 
1173 	xtfm = crypto_alloc_shash(name, 0, CRYPTO_ALG_NEED_FALLBACK);
1174 	if (IS_ERR(xtfm)) {
1175 		dev_err(hdev->dev, "failed to allocate %s fallback\n",
1176 			name);
1177 		return PTR_ERR(xtfm);
1178 	}
1179 	dev_info(hdev->dev, "allocated %s fallback\n", name);
1180 	ctx->xtfm = xtfm;
1181 
1182 	return 0;
1183 }
1184 
1185 static int stm32_hash_cra_init_algs(struct crypto_tfm *tfm, u32 algs_flags)
1186 {
1187 	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);
1188 
1189 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
1190 				 sizeof(struct stm32_hash_request_ctx));
1191 
1192 	ctx->keylen = 0;
1193 
1194 	if (algs_flags)
1195 		ctx->flags |= algs_flags;
1196 
1197 	ctx->enginectx.op.do_one_request = stm32_hash_one_request;
1198 
1199 	return stm32_hash_init_fallback(tfm);
1200 }
1201 
/* ->cra_init for plain (non-HMAC, non-SHA3) algorithms. */
static int stm32_hash_cra_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, 0);
}

/* ->cra_init for HMAC variants: tags the context with HASH_FLAGS_HMAC. */
static int stm32_hash_cra_hmac_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_HMAC);
}

/* ->cra_init for plain SHA3 digests. */
static int stm32_hash_cra_sha3_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_SHA3_MODE);
}

/* ->cra_init for HMAC-SHA3 variants. */
static int stm32_hash_cra_sha3_hmac_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, HASH_FLAGS_SHA3_MODE |
					HASH_FLAGS_HMAC);
}
1222 
1223 
/* ->cra_exit: free the Ux500 software fallback if one was allocated. */
static void stm32_hash_cra_exit(struct crypto_tfm *tfm)
{
	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->xtfm)
		crypto_free_shash(ctx->xtfm);
}
1231 
1232 static irqreturn_t stm32_hash_irq_thread(int irq, void *dev_id)
1233 {
1234 	struct stm32_hash_dev *hdev = dev_id;
1235 
1236 	if (HASH_FLAGS_CPU & hdev->flags) {
1237 		if (HASH_FLAGS_OUTPUT_READY & hdev->flags) {
1238 			hdev->flags &= ~HASH_FLAGS_OUTPUT_READY;
1239 			goto finish;
1240 		}
1241 	} else if (HASH_FLAGS_DMA_ACTIVE & hdev->flags) {
1242 		hdev->flags &= ~HASH_FLAGS_DMA_ACTIVE;
1243 			goto finish;
1244 	}
1245 
1246 	return IRQ_HANDLED;
1247 
1248 finish:
1249 	/* Finish current request */
1250 	stm32_hash_finish_req(hdev->req, 0);
1251 
1252 	return IRQ_HANDLED;
1253 }
1254 
1255 static irqreturn_t stm32_hash_irq_handler(int irq, void *dev_id)
1256 {
1257 	struct stm32_hash_dev *hdev = dev_id;
1258 	u32 reg;
1259 
1260 	reg = stm32_hash_read(hdev, HASH_SR);
1261 	if (reg & HASH_SR_OUTPUT_READY) {
1262 		hdev->flags |= HASH_FLAGS_OUTPUT_READY;
1263 		/* Disable IT*/
1264 		stm32_hash_write(hdev, HASH_IMR, 0);
1265 		return IRQ_WAKE_THREAD;
1266 	}
1267 
1268 	return IRQ_NONE;
1269 }
1270 
/* MD5 and HMAC-MD5 algorithm descriptors (shared ahash entry points). */
static struct ahash_alg algs_md5[] = {
	/* md5 */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "stm32-md5",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
	/* hmac(md5) */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "stm32-hmac-md5",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_hmac_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	}
};
1326 
/* SHA-1 and HMAC-SHA1 algorithm descriptors. */
static struct ahash_alg algs_sha1[] = {
	/* sha1 */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "stm32-sha1",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
	/* hmac(sha1) */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "stm32-hmac-sha1",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_hmac_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
};
1382 
/* SHA-224 and HMAC-SHA224 algorithm descriptors. */
static struct ahash_alg algs_sha224[] = {
	/* sha224 */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "stm32-sha224",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
	/* hmac(sha224) */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.setkey = stm32_hash_setkey,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "stm32-hmac-sha224",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_hmac_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
};
1438 
/* SHA-256 and HMAC-SHA256 algorithm descriptors. */
static struct ahash_alg algs_sha256[] = {
	/* sha256 */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "stm32-sha256",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
	/* hmac(sha256) */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "stm32-hmac-sha256",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_hmac_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
};
1494 
/* SHA-384/SHA-512 plus their HMAC variants. */
static struct ahash_alg algs_sha384_sha512[] = {
	/* sha384 */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "stm32-sha384",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
	/* hmac(sha384) */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.setkey = stm32_hash_setkey,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "stm32-hmac-sha384",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_hmac_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
	/* sha512 */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "stm32-sha512",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
	/* hmac(sha512) */
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "stm32-hmac-sha512",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_hmac_init,
				.cra_exit = stm32_hash_cra_exit,
				.cra_module = THIS_MODULE,
			}
		}
	},
};
1603 
1604 static struct ahash_alg algs_sha3[] = {
1605 	{
1606 		.init = stm32_hash_init,
1607 		.update = stm32_hash_update,
1608 		.final = stm32_hash_final,
1609 		.finup = stm32_hash_finup,
1610 		.digest = stm32_hash_digest,
1611 		.export = stm32_hash_export,
1612 		.import = stm32_hash_import,
1613 		.halg = {
1614 			.digestsize = SHA3_224_DIGEST_SIZE,
1615 			.statesize = sizeof(struct stm32_hash_state),
1616 			.base = {
1617 				.cra_name = "sha3-224",
1618 				.cra_driver_name = "stm32-sha3-224",
1619 				.cra_priority = 200,
1620 				.cra_flags = CRYPTO_ALG_ASYNC |
1621 					CRYPTO_ALG_KERN_DRIVER_ONLY,
1622 				.cra_blocksize = SHA3_224_BLOCK_SIZE,
1623 				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
1624 				.cra_alignmask = 3,
1625 				.cra_init = stm32_hash_cra_sha3_init,
1626 				.cra_exit = stm32_hash_cra_exit,
1627 				.cra_module = THIS_MODULE,
1628 			}
1629 		}
1630 	},
1631 	{
1632 		.init = stm32_hash_init,
1633 		.update = stm32_hash_update,
1634 		.final = stm32_hash_final,
1635 		.finup = stm32_hash_finup,
1636 		.digest = stm32_hash_digest,
1637 		.export = stm32_hash_export,
1638 		.import = stm32_hash_import,
1639 		.setkey = stm32_hash_setkey,
1640 		.halg = {
1641 			.digestsize = SHA3_224_DIGEST_SIZE,
1642 			.statesize = sizeof(struct stm32_hash_state),
1643 			.base = {
1644 				.cra_name = "hmac(sha3-224)",
1645 				.cra_driver_name = "stm32-hmac-sha3-224",
1646 				.cra_priority = 200,
1647 				.cra_flags = CRYPTO_ALG_ASYNC |
1648 					CRYPTO_ALG_KERN_DRIVER_ONLY,
1649 				.cra_blocksize = SHA3_224_BLOCK_SIZE,
1650 				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
1651 				.cra_alignmask = 3,
1652 				.cra_init = stm32_hash_cra_sha3_hmac_init,
1653 				.cra_exit = stm32_hash_cra_exit,
1654 				.cra_module = THIS_MODULE,
1655 			}
1656 		}
1657 	},
1658 		{
1659 		.init = stm32_hash_init,
1660 		.update = stm32_hash_update,
1661 		.final = stm32_hash_final,
1662 		.finup = stm32_hash_finup,
1663 		.digest = stm32_hash_digest,
1664 		.export = stm32_hash_export,
1665 		.import = stm32_hash_import,
1666 		.halg = {
1667 			.digestsize = SHA3_256_DIGEST_SIZE,
1668 			.statesize = sizeof(struct stm32_hash_state),
1669 			.base = {
1670 				.cra_name = "sha3-256",
1671 				.cra_driver_name = "stm32-sha3-256",
1672 				.cra_priority = 200,
1673 				.cra_flags = CRYPTO_ALG_ASYNC |
1674 					CRYPTO_ALG_KERN_DRIVER_ONLY,
1675 				.cra_blocksize = SHA3_256_BLOCK_SIZE,
1676 				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
1677 				.cra_alignmask = 3,
1678 				.cra_init = stm32_hash_cra_sha3_init,
1679 				.cra_exit = stm32_hash_cra_exit,
1680 				.cra_module = THIS_MODULE,
1681 			}
1682 		}
1683 	},
1684 	{
1685 		.init = stm32_hash_init,
1686 		.update = stm32_hash_update,
1687 		.final = stm32_hash_final,
1688 		.finup = stm32_hash_finup,
1689 		.digest = stm32_hash_digest,
1690 		.export = stm32_hash_export,
1691 		.import = stm32_hash_import,
1692 		.setkey = stm32_hash_setkey,
1693 		.halg = {
1694 			.digestsize = SHA3_256_DIGEST_SIZE,
1695 			.statesize = sizeof(struct stm32_hash_state),
1696 			.base = {
1697 				.cra_name = "hmac(sha3-256)",
1698 				.cra_driver_name = "stm32-hmac-sha3-256",
1699 				.cra_priority = 200,
1700 				.cra_flags = CRYPTO_ALG_ASYNC |
1701 					CRYPTO_ALG_KERN_DRIVER_ONLY,
1702 				.cra_blocksize = SHA3_256_BLOCK_SIZE,
1703 				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
1704 				.cra_alignmask = 3,
1705 				.cra_init = stm32_hash_cra_sha3_hmac_init,
1706 				.cra_exit = stm32_hash_cra_exit,
1707 				.cra_module = THIS_MODULE,
1708 			}
1709 		}
1710 	},
1711 	{
1712 		.init = stm32_hash_init,
1713 		.update = stm32_hash_update,
1714 		.final = stm32_hash_final,
1715 		.finup = stm32_hash_finup,
1716 		.digest = stm32_hash_digest,
1717 		.export = stm32_hash_export,
1718 		.import = stm32_hash_import,
1719 		.halg = {
1720 			.digestsize = SHA3_384_DIGEST_SIZE,
1721 			.statesize = sizeof(struct stm32_hash_state),
1722 			.base = {
1723 				.cra_name = "sha3-384",
1724 				.cra_driver_name = "stm32-sha3-384",
1725 				.cra_priority = 200,
1726 				.cra_flags = CRYPTO_ALG_ASYNC |
1727 					CRYPTO_ALG_KERN_DRIVER_ONLY,
1728 				.cra_blocksize = SHA3_384_BLOCK_SIZE,
1729 				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
1730 				.cra_alignmask = 3,
1731 				.cra_init = stm32_hash_cra_sha3_init,
1732 				.cra_exit = stm32_hash_cra_exit,
1733 				.cra_module = THIS_MODULE,
1734 			}
1735 		}
1736 	},
1737 	{
1738 		.init = stm32_hash_init,
1739 		.update = stm32_hash_update,
1740 		.final = stm32_hash_final,
1741 		.finup = stm32_hash_finup,
1742 		.digest = stm32_hash_digest,
1743 		.export = stm32_hash_export,
1744 		.import = stm32_hash_import,
1745 		.setkey = stm32_hash_setkey,
1746 		.halg = {
1747 			.digestsize = SHA3_384_DIGEST_SIZE,
1748 			.statesize = sizeof(struct stm32_hash_state),
1749 			.base = {
1750 				.cra_name = "hmac(sha3-384)",
1751 				.cra_driver_name = "stm32-hmac-sha3-384",
1752 				.cra_priority = 200,
1753 				.cra_flags = CRYPTO_ALG_ASYNC |
1754 					CRYPTO_ALG_KERN_DRIVER_ONLY,
1755 				.cra_blocksize = SHA3_384_BLOCK_SIZE,
1756 				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
1757 				.cra_alignmask = 3,
1758 				.cra_init = stm32_hash_cra_sha3_hmac_init,
1759 				.cra_exit = stm32_hash_cra_exit,
1760 				.cra_module = THIS_MODULE,
1761 			}
1762 		}
1763 	},
1764 	{
1765 		.init = stm32_hash_init,
1766 		.update = stm32_hash_update,
1767 		.final = stm32_hash_final,
1768 		.finup = stm32_hash_finup,
1769 		.digest = stm32_hash_digest,
1770 		.export = stm32_hash_export,
1771 		.import = stm32_hash_import,
1772 		.halg = {
1773 			.digestsize = SHA3_512_DIGEST_SIZE,
1774 			.statesize = sizeof(struct stm32_hash_state),
1775 			.base = {
1776 				.cra_name = "sha3-512",
1777 				.cra_driver_name = "stm32-sha3-512",
1778 				.cra_priority = 200,
1779 				.cra_flags = CRYPTO_ALG_ASYNC |
1780 					CRYPTO_ALG_KERN_DRIVER_ONLY,
1781 				.cra_blocksize = SHA3_512_BLOCK_SIZE,
1782 				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
1783 				.cra_alignmask = 3,
1784 				.cra_init = stm32_hash_cra_sha3_init,
1785 				.cra_exit = stm32_hash_cra_exit,
1786 				.cra_module = THIS_MODULE,
1787 			}
1788 		}
1789 	},
1790 	{
1791 		.init = stm32_hash_init,
1792 		.update = stm32_hash_update,
1793 		.final = stm32_hash_final,
1794 		.finup = stm32_hash_finup,
1795 		.digest = stm32_hash_digest,
1796 		.export = stm32_hash_export,
1797 		.import = stm32_hash_import,
1798 		.setkey = stm32_hash_setkey,
1799 		.halg = {
1800 			.digestsize = SHA3_512_DIGEST_SIZE,
1801 			.statesize = sizeof(struct stm32_hash_state),
1802 			.base = {
1803 				.cra_name = "hmac(sha3-512)",
1804 				.cra_driver_name = "stm32-hmac-sha3-512",
1805 				.cra_priority = 200,
1806 				.cra_flags = CRYPTO_ALG_ASYNC |
1807 					CRYPTO_ALG_KERN_DRIVER_ONLY,
1808 				.cra_blocksize = SHA3_512_BLOCK_SIZE,
1809 				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
1810 				.cra_alignmask = 3,
1811 				.cra_init = stm32_hash_cra_sha3_hmac_init,
1812 				.cra_exit = stm32_hash_cra_exit,
1813 				.cra_module = THIS_MODULE,
1814 			}
1815 		}
1816 	}
1817 };
1818 
1819 static int stm32_hash_register_algs(struct stm32_hash_dev *hdev)
1820 {
1821 	unsigned int i, j;
1822 	int err;
1823 
1824 	for (i = 0; i < hdev->pdata->algs_info_size; i++) {
1825 		for (j = 0; j < hdev->pdata->algs_info[i].size; j++) {
1826 			err = crypto_register_ahash(
1827 				&hdev->pdata->algs_info[i].algs_list[j]);
1828 			if (err)
1829 				goto err_algs;
1830 		}
1831 	}
1832 
1833 	return 0;
1834 err_algs:
1835 	dev_err(hdev->dev, "Algo %d : %d failed\n", i, j);
1836 	for (; i--; ) {
1837 		for (; j--;)
1838 			crypto_unregister_ahash(
1839 				&hdev->pdata->algs_info[i].algs_list[j]);
1840 	}
1841 
1842 	return err;
1843 }
1844 
1845 static int stm32_hash_unregister_algs(struct stm32_hash_dev *hdev)
1846 {
1847 	unsigned int i, j;
1848 
1849 	for (i = 0; i < hdev->pdata->algs_info_size; i++) {
1850 		for (j = 0; j < hdev->pdata->algs_info[i].size; j++)
1851 			crypto_unregister_ahash(
1852 				&hdev->pdata->algs_info[i].algs_list[j]);
1853 	}
1854 
1855 	return 0;
1856 }
1857 
/* Ux500: SHA-1 and SHA-256 only. */
static struct stm32_hash_algs_info stm32_hash_algs_info_ux500[] = {
	{
		.algs_list	= algs_sha1,
		.size		= ARRAY_SIZE(algs_sha1),
	},
	{
		.algs_list	= algs_sha256,
		.size		= ARRAY_SIZE(algs_sha256),
	},
};

/* Ux500 needs a software fallback (see stm32_hash_init_fallback). */
static const struct stm32_hash_pdata stm32_hash_pdata_ux500 = {
	.alg_shift	= 7,
	.algs_info	= stm32_hash_algs_info_ux500,
	.algs_info_size	= ARRAY_SIZE(stm32_hash_algs_info_ux500),
	.broken_emptymsg = true,
	.ux500		= true,
};

/* STM32F4: MD5 and SHA-1. */
static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f4[] = {
	{
		.algs_list	= algs_md5,
		.size		= ARRAY_SIZE(algs_md5),
	},
	{
		.algs_list	= algs_sha1,
		.size		= ARRAY_SIZE(algs_sha1),
	},
};

static const struct stm32_hash_pdata stm32_hash_pdata_stm32f4 = {
	.alg_shift	= 7,
	.algs_info	= stm32_hash_algs_info_stm32f4,
	.algs_info_size	= ARRAY_SIZE(stm32_hash_algs_info_stm32f4),
	.has_sr		= true,
	.has_mdmat	= true,
};
1895 
/* STM32F7: MD5, SHA-1, SHA-224 and SHA-256. */
static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f7[] = {
	{
		.algs_list	= algs_md5,
		.size		= ARRAY_SIZE(algs_md5),
	},
	{
		.algs_list	= algs_sha1,
		.size		= ARRAY_SIZE(algs_sha1),
	},
	{
		.algs_list	= algs_sha224,
		.size		= ARRAY_SIZE(algs_sha224),
	},
	{
		.algs_list	= algs_sha256,
		.size		= ARRAY_SIZE(algs_sha256),
	},
};

static const struct stm32_hash_pdata stm32_hash_pdata_stm32f7 = {
	.alg_shift	= 7,
	.algs_info	= stm32_hash_algs_info_stm32f7,
	.algs_info_size	= ARRAY_SIZE(stm32_hash_algs_info_stm32f7),
	.has_sr		= true,
	.has_mdmat	= true,
};

/* STM32MP13: SHA-1/2 family plus SHA-3; no MD5. */
static struct stm32_hash_algs_info stm32_hash_algs_info_stm32mp13[] = {
	{
		.algs_list	= algs_sha1,
		.size		= ARRAY_SIZE(algs_sha1),
	},
	{
		.algs_list	= algs_sha224,
		.size		= ARRAY_SIZE(algs_sha224),
	},
	{
		.algs_list	= algs_sha256,
		.size		= ARRAY_SIZE(algs_sha256),
	},
	{
		.algs_list	= algs_sha384_sha512,
		.size		= ARRAY_SIZE(algs_sha384_sha512),
	},
	{
		.algs_list	= algs_sha3,
		.size		= ARRAY_SIZE(algs_sha3),
	},
};

/* Note the different algorithm field position (bit 17) on MP13. */
static const struct stm32_hash_pdata stm32_hash_pdata_stm32mp13 = {
	.alg_shift	= 17,
	.algs_info	= stm32_hash_algs_info_stm32mp13,
	.algs_info_size	= ARRAY_SIZE(stm32_hash_algs_info_stm32mp13),
	.has_sr		= true,
	.has_mdmat	= true,
};
1953 
/* Device-tree compatibles and the per-variant platform data they select. */
static const struct of_device_id stm32_hash_of_match[] = {
	{ .compatible = "stericsson,ux500-hash", .data = &stm32_hash_pdata_ux500 },
	{ .compatible = "st,stm32f456-hash", .data = &stm32_hash_pdata_stm32f4 },
	{ .compatible = "st,stm32f756-hash", .data = &stm32_hash_pdata_stm32f7 },
	{ .compatible = "st,stm32mp13-hash", .data = &stm32_hash_pdata_stm32mp13 },
	{},
};

MODULE_DEVICE_TABLE(of, stm32_hash_of_match);
1963 
1964 static int stm32_hash_get_of_match(struct stm32_hash_dev *hdev,
1965 				   struct device *dev)
1966 {
1967 	hdev->pdata = of_device_get_match_data(dev);
1968 	if (!hdev->pdata) {
1969 		dev_err(dev, "no compatible OF match\n");
1970 		return -EINVAL;
1971 	}
1972 
1973 	return 0;
1974 }
1975 
/*
 * Probe: map registers, set up IRQ (or polling), clock, runtime PM,
 * optional reset and DMA, then register with the crypto engine and the
 * crypto API.  Error paths unwind in reverse order of acquisition.
 */
static int stm32_hash_probe(struct platform_device *pdev)
{
	struct stm32_hash_dev *hdev;
	struct device *dev = &pdev->dev;
	struct resource *res;
	int ret, irq;

	hdev = devm_kzalloc(dev, sizeof(*hdev), GFP_KERNEL);
	if (!hdev)
		return -ENOMEM;

	hdev->io_base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
	if (IS_ERR(hdev->io_base))
		return PTR_ERR(hdev->io_base);

	/* Physical base is kept for programming the DMA engine. */
	hdev->phys_base = res->start;

	ret = stm32_hash_get_of_match(hdev, dev);
	if (ret)
		return ret;

	/* IRQ is optional: -ENXIO (no IRQ) selects polling mode below. */
	irq = platform_get_irq_optional(pdev, 0);
	if (irq < 0 && irq != -ENXIO)
		return irq;

	if (irq > 0) {
		ret = devm_request_threaded_irq(dev, irq,
						stm32_hash_irq_handler,
						stm32_hash_irq_thread,
						IRQF_ONESHOT,
						dev_name(dev), hdev);
		if (ret) {
			dev_err(dev, "Cannot grab IRQ\n");
			return ret;
		}
	} else {
		dev_info(dev, "No IRQ, use polling mode\n");
		hdev->polled = true;
	}

	hdev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(hdev->clk))
		return dev_err_probe(dev, PTR_ERR(hdev->clk),
				     "failed to get clock for hash\n");

	ret = clk_prepare_enable(hdev->clk);
	if (ret) {
		dev_err(dev, "failed to enable hash clock (%d)\n", ret);
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, HASH_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	/* Keep the device active until probe completes (put at the end). */
	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	/* Reset is optional; only defer on -EPROBE_DEFER. */
	hdev->rst = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(hdev->rst)) {
		if (PTR_ERR(hdev->rst) == -EPROBE_DEFER) {
			ret = -EPROBE_DEFER;
			goto err_reset;
		}
	} else {
		reset_control_assert(hdev->rst);
		udelay(2);
		reset_control_deassert(hdev->rst);
	}

	hdev->dev = dev;

	platform_set_drvdata(pdev, hdev);

	/* DMA is optional too: -ENOENT/-ENODEV fall back to CPU mode. */
	ret = stm32_hash_dma_init(hdev);
	switch (ret) {
	case 0:
		break;
	case -ENOENT:
	case -ENODEV:
		dev_info(dev, "DMA mode not available\n");
		break;
	default:
		dev_err(dev, "DMA init error %d\n", ret);
		goto err_dma;
	}

	spin_lock(&stm32_hash.lock);
	list_add_tail(&hdev->list, &stm32_hash.dev_list);
	spin_unlock(&stm32_hash.lock);

	/* Initialize crypto engine */
	hdev->engine = crypto_engine_alloc_init(dev, 1);
	if (!hdev->engine) {
		ret = -ENOMEM;
		goto err_engine;
	}

	ret = crypto_engine_start(hdev->engine);
	if (ret)
		goto err_engine_start;

	if (hdev->pdata->ux500)
		/* FIXME: implement DMA mode for Ux500 */
		hdev->dma_mode = 0;
	else
		hdev->dma_mode = stm32_hash_read(hdev, HASH_HWCFGR) & HASH_HWCFG_DMA_MASK;

	/* Register algos */
	ret = stm32_hash_register_algs(hdev);
	if (ret)
		goto err_algs;

	dev_info(dev, "Init HASH done HW ver %x DMA mode %u\n",
		 stm32_hash_read(hdev, HASH_VER), hdev->dma_mode);

	/* Drop the probe-time PM reference; autosuspend may now kick in. */
	pm_runtime_put_sync(dev);

	return 0;

err_algs:
err_engine_start:
	crypto_engine_exit(hdev->engine);
err_engine:
	spin_lock(&stm32_hash.lock);
	list_del(&hdev->list);
	spin_unlock(&stm32_hash.lock);
err_dma:
	if (hdev->dma_lch)
		dma_release_channel(hdev->dma_lch);
err_reset:
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(hdev->clk);

	return ret;
}
2114 
/*
 * Remove: tear down in reverse probe order — algorithms, crypto engine,
 * device list entry, DMA channel, runtime PM, clock.
 */
static int stm32_hash_remove(struct platform_device *pdev)
{
	struct stm32_hash_dev *hdev;
	int ret;

	hdev = platform_get_drvdata(pdev);
	if (!hdev)
		return -ENODEV;

	/* Resume so the clock is running during teardown. */
	ret = pm_runtime_get_sync(hdev->dev);

	stm32_hash_unregister_algs(hdev);

	crypto_engine_exit(hdev->engine);

	spin_lock(&stm32_hash.lock);
	list_del(&hdev->list);
	spin_unlock(&stm32_hash.lock);

	if (hdev->dma_lch)
		dma_release_channel(hdev->dma_lch);

	pm_runtime_disable(hdev->dev);
	pm_runtime_put_noidle(hdev->dev);

	/* Only drop the clock if the resume above actually enabled it. */
	if (ret >= 0)
		clk_disable_unprepare(hdev->clk);

	return 0;
}
2145 
2146 #ifdef CONFIG_PM
/* Runtime suspend: gate the peripheral clock. */
static int stm32_hash_runtime_suspend(struct device *dev)
{
	struct stm32_hash_dev *hdev = dev_get_drvdata(dev);

	clk_disable_unprepare(hdev->clk);

	return 0;
}
2155 
2156 static int stm32_hash_runtime_resume(struct device *dev)
2157 {
2158 	struct stm32_hash_dev *hdev = dev_get_drvdata(dev);
2159 	int ret;
2160 
2161 	ret = clk_prepare_enable(hdev->clk);
2162 	if (ret) {
2163 		dev_err(hdev->dev, "Failed to prepare_enable clock\n");
2164 		return ret;
2165 	}
2166 
2167 	return 0;
2168 }
2169 #endif
2170 
/* System sleep is routed through runtime PM force suspend/resume. */
static const struct dev_pm_ops stm32_hash_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_hash_runtime_suspend,
			   stm32_hash_runtime_resume, NULL)
};
2177 
/* Platform driver glue; matched via stm32_hash_of_match. */
static struct platform_driver stm32_hash_driver = {
	.probe		= stm32_hash_probe,
	.remove		= stm32_hash_remove,
	.driver		= {
		.name	= "stm32-hash",
		.pm = &stm32_hash_pm_ops,
		.of_match_table	= stm32_hash_of_match,
	}
};

module_platform_driver(stm32_hash_driver);

MODULE_DESCRIPTION("STM32 SHA1/SHA2/SHA3 & MD5 (HMAC) hw accelerator driver");
MODULE_AUTHOR("Lionel Debieve <lionel.debieve@st.com>");
MODULE_LICENSE("GPL v2");
2193