/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 * License terms: GNU General Public License (GPL), version 2
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>

#define DRIVER_NAME             "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)

/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C

/* Registers values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)

/* Misc */
#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
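/*
 * Bytes already consumed in the current in/out scatterlist entry, i.e. the
 * walk offset relative to the start of that entry
 */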
#define _walked_in              (cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out             (cryp->out_walk.offset - cryp->out_sg->offset)

struct stm32_cryp_ctx {
	struct stm32_cryp       *cryp;
	int                     keylen;
	u32                     key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long           flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head        list;
	struct device           *dev;
	void __iomem            *regs;
	struct clk              *clk;
	unsigned long           flags;
	u32                     irq_status;
	struct stm32_cryp_ctx   *ctx;

	struct crypto_engine    *engine;

	struct mutex            lock; /* protects req */
	struct ablkcipher_request *req;

	size_t                  hw_blocksize;

	size_t                  total_in;
	size_t                  total_in_save;
	size_t                  total_out;
	size_t                  total_out_save;

	struct scatterlist      *in_sg;
	struct scatterlist      *out_sg;
	struct scatterlist      *out_sg_save;

	struct scatterlist      in_sgl;
	struct scatterlist      out_sgl;
	bool                    sgs_copied;

	int                     in_sg_len;
	int                     out_sg_len;

	struct scatter_walk     in_walk;
	struct scatter_walk     out_walk;

	u32                     last_ctr[4];
};

struct stm32_cryp_list {
	struct list_head        dev_list;
	spinlock_t              lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			!(status & SR_BUSY), 10, 100000);
}

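/*
 * Bind a tfm context to a CRYP instance: reuse the device already attached
 * to this context, otherwise pick the first one registered in cryp_list.
 */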
static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}

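/*
 * An sg list can be fed to the hardware as-is only if the total length is a
 * multiple of the block size and every entry is 32-bit aligned with a
 * block-multiple length.
 */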
static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
				    size_t align)
{
	int len = 0;

	if (!total)
		return 0;

	if (!IS_ALIGNED(total, align))
		return -EINVAL;

	while (sg) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return -EINVAL;

		if (!IS_ALIGNED(sg->length, align))
			return -EINVAL;

		len += sg->length;
		sg = sg_next(sg);
	}

	if (len != total)
		return -EINVAL;

	return 0;
}

static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
	int ret;

	ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
				       cryp->hw_blocksize);
	if (ret)
		return ret;

	ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
				       cryp->hw_blocksize);

	return ret;
}

static void sg_copy_buf(void *buf, struct scatterlist *sg,
			unsigned int start, unsigned int nbytes, int out)
{
	struct scatter_walk walk;

	if (!nbytes)
		return;

	scatterwalk_start(&walk, sg);
	scatterwalk_advance(&walk, start);
	scatterwalk_copychunks(buf, &walk, nbytes, out);
	scatterwalk_done(&walk, out, 0);
}

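/*
 * If the request buffers do not satisfy the alignment rules checked above,
 * linearize them into freshly allocated bounce buffers and substitute
 * single-entry scatterlists pointing at those buffers.
 */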
static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
	void *buf_in, *buf_out;
	int pages_in, pages_out, total_in, total_out;

	if (!stm32_cryp_check_io_aligned(cryp)) {
		cryp->sgs_copied = 0;
		return 0;
	}

	total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
	pages_in = total_in ? get_order(total_in) : 1;
	buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages_in);

	total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
	pages_out = total_out ? get_order(total_out) : 1;
	buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages_out);

	if (!buf_in || !buf_out) {
		dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
		/* free_pages() safely ignores a NULL address */
		free_pages((unsigned long)buf_in, pages_in);
		free_pages((unsigned long)buf_out, pages_out);
		cryp->sgs_copied = 0;
		return -ENOMEM;
	}

	sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

	sg_init_one(&cryp->in_sgl, buf_in, total_in);
	cryp->in_sg = &cryp->in_sgl;
	cryp->in_sg_len = 1;

	sg_init_one(&cryp->out_sgl, buf_out, total_out);
	cryp->out_sg_save = cryp->out_sg;
	cryp->out_sg = &cryp->out_sgl;
	cryp->out_sg_len = 1;

	cryp->sgs_copied = 1;

	return 0;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
	stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
		stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
	}
}

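/*
 * DES uses only the K1 register pair; AES and TDES keys are loaded one
 * 32-bit word per register, walking down from K3RR, so the last key word
 * ends up in K3RR and key[0] in the lowest register the key size needs.
 */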
static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
		stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
	} else {
		r_id = CRYP_K3RR;
		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
			stm32_cryp_write(c, r_id,
					 cpu_to_be32(c->ctx->key[i - 1]));
	}
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

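/*
 * One-shot hardware setup for the current request: program the key, data
 * type and key size, run the AES key preparation phase when decrypting in
 * ECB or CBC mode, load the IV if any, then enable the engine.
 */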
static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	/* Disable interrupt */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	/* Set key */
	stm32_cryp_hw_write_key(cryp);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

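	/*
	 * The key size field only matters for AES: DES and TDES requests
	 * take the default branch below and the hardware ignores it.
	 */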
	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}
	}

	cfg |= hw_mode;

	if (is_decrypt(cryp))
		cfg |= CR_DEC_NOT_ENC;

	/* Apply config and flush (valid when CRYPEN = 0) */
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	switch (hw_mode) {
	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info);
		break;

	default:
		break;
	}

	/* Enable now */
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	return 0;
}

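/*
 * Complete the current request: when bounce buffers were used, copy the
 * result back into the caller's sg list and release the pages, then hand
 * the request back to the crypto engine and wipe the key material.
 */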
static void stm32_cryp_finish_req(struct stm32_cryp *cryp)
{
	int err = 0;

	if (cryp->sgs_copied) {
		void *buf_in, *buf_out;
		int pages, len;

		buf_in = sg_virt(&cryp->in_sgl);
		buf_out = sg_virt(&cryp->out_sgl);

		sg_copy_buf(buf_out, cryp->out_sg_save, 0,
			    cryp->total_out_save, 1);

		len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_in, pages);

		len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_out, pages);
	}

	crypto_finalize_cipher_request(cryp->engine, cryp->req, err);
	cryp->req = NULL;

	memset(cryp->ctx->key, 0, cryp->ctx->keylen);

	mutex_unlock(&cryp->lock);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cra_init(struct crypto_tfm *tfm)
{
	tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx);

	return 0;
}

static int stm32_cryp_crypt(struct ablkcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_cipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != DES_KEY_SIZE)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	if (keylen != (3 * DES_KEY_SIZE))
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_des_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

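/*
 * Engine "prepare" step: take the device lock, capture the request
 * geometry, bounce unaligned buffers if needed, start both scatterwalks
 * and initialize the hardware.
 */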
static int stm32_cryp_prepare_req(struct crypto_engine *engine,
				  struct ablkcipher_request *req)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	int ret;

	if (!req)
		return -EINVAL;

	ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));

	cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	mutex_lock(&cryp->lock);

	rctx = ablkcipher_request_ctx(req);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	cryp->req = req;
	cryp->total_in = req->nbytes;
	cryp->total_out = cryp->total_in;

	cryp->total_in_save = cryp->total_in;
	cryp->total_out_save = cryp->total_out;

	cryp->in_sg = req->src;
	cryp->out_sg = req->dst;
	cryp->out_sg_save = cryp->out_sg;

	cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
	if (cryp->in_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get in_sg_len\n");
		ret = cryp->in_sg_len;
		goto out;
	}

	cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
	if (cryp->out_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get out_sg_len\n");
		ret = cryp->out_sg_len;
		goto out;
	}

	ret = stm32_cryp_copy_sgs(cryp);
	if (ret)
		goto out;

	scatterwalk_start(&cryp->in_walk, cryp->in_sg);
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	ret = stm32_cryp_hw_init(cryp);
out:
	if (ret)
		mutex_unlock(&cryp->lock);

	return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 struct ablkcipher_request *req)
{
	return stm32_cryp_prepare_req(engine, req);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine,
				     struct ablkcipher_request *req)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_cpu_start(cryp);
}

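/*
 * Advance the output walk by n bytes and return the next destination
 * pointer, hopping to the next scatterlist entry once the current one is
 * fully consumed. stm32_cryp_next_in() below does the same for the input.
 */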
static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
				unsigned int n)
{
	scatterwalk_advance(&cryp->out_walk, n);

	if (unlikely(cryp->out_sg->length == _walked_out)) {
		cryp->out_sg = sg_next(cryp->out_sg);
		if (cryp->out_sg) {
			scatterwalk_start(&cryp->out_walk, cryp->out_sg);
			return (sg_virt(cryp->out_sg) + _walked_out);
		}
	}

	return (u32 *)((u8 *)dst + n);
}

static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
			       unsigned int n)
{
	scatterwalk_advance(&cryp->in_walk, n);

	if (unlikely(cryp->in_sg->length == _walked_in)) {
		cryp->in_sg = sg_next(cryp->in_sg);
		if (cryp->in_sg) {
			scatterwalk_start(&cryp->in_walk, cryp->in_sg);
			return (sg_virt(cryp->in_sg) + _walked_in);
		}
	}

	return (u32 *)((u8 *)src + n);
}

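/*
 * The hardware increments only the 32 LSBs of the CTR counter. When the
 * low word is about to wrap, propagate the carry to the upper words in
 * software and reload the full counter with the engine briefly disabled.
 */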
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
		cryp->last_ctr[3] = 0;
		cryp->last_ctr[2]++;
		if (!cryp->last_ctr[2]) {
			cryp->last_ctr[1]++;
			if (!cryp->last_ctr[1])
				cryp->last_ctr[0]++;
		}

		cr = stm32_cryp_read(cryp, CRYP_CR);
		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

		stm32_cryp_write(cryp, CRYP_CR, cr);
	}

	cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
	cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
	cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
	cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}

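/*
 * Drain one block from the output FIFO, handling a trailing partial word
 * and discarding words that only carry input padding. Returns true once
 * the whole request has been processed.
 */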
static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 d32, *dst;
	u8 *d8;

	dst = sg_virt(cryp->out_sg) + _walked_out;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_out >= sizeof(u32))) {
			/* Read a full u32 */
			*dst = stm32_cryp_read(cryp, CRYP_DOUT);

			dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
			cryp->total_out -= sizeof(u32);
		} else if (!cryp->total_out) {
			/* Empty fifo out (data from input padding) */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
		} else {
			/* Read less than an u32 */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
			d8 = (u8 *)&d32;

			for (j = 0; j < cryp->total_out; j++) {
				*((u8 *)dst) = *(d8++);
				dst = stm32_cryp_next_out(cryp, dst, 1);
			}
			cryp->total_out = 0;
		}
	}

	return !cryp->total_out || !cryp->total_in;
}

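/*
 * Feed one block into the input FIFO, zero-padding a trailing partial word
 * and writing zero words once the payload is exhausted.
 */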
static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 *src;
	u8 d8[4];

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_in >= sizeof(u32))) {
			/* Write a full u32 */
			stm32_cryp_write(cryp, CRYP_DIN, *src);

			src = stm32_cryp_next_in(cryp, src, sizeof(u32));
			cryp->total_in -= sizeof(u32);
		} else if (!cryp->total_in) {
			/* Write padding data */
			stm32_cryp_write(cryp, CRYP_DIN, 0);
		} else {
			/* Write less than an u32 */
			memset(d8, 0, sizeof(u32));
			for (j = 0; j < cryp->total_in; j++) {
				d8[j] = *((u8 *)src);
				src = stm32_cryp_next_in(cryp, src, 1);
			}

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			cryp->total_in = 0;
		}
	}
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->total_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

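/*
 * Threaded half of the interrupt: the hard handler below only latches MISR,
 * so service the output FIFO first (finalizing the request once everything
 * has been read back), then refill the input FIFO.
 */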
static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	if (cryp->irq_status & MISR_OUT) {
		/* Output FIFO IRQ: read data */
		if (unlikely(stm32_cryp_irq_read_data(cryp))) {
			/* All bytes processed, finish */
			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
			stm32_cryp_finish_req(cryp);
			return IRQ_HANDLED;
		}
	}

	if (cryp->irq_status & MISR_IN) {
		/* Input FIFO IRQ: write data */
		stm32_cryp_irq_write_data(cryp);
	}

	return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

	return IRQ_WAKE_THREAD;
}

static struct crypto_alg crypto_algs[] = {
{
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "stm32-ecb-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_ecb_encrypt,
		.decrypt	= stm32_cryp_aes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "stm32-cbc-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_cbc_encrypt,
		.decrypt	= stm32_cryp_aes_cbc_decrypt,
	}
},
{
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "stm32-ctr-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_ctr_encrypt,
		.decrypt	= stm32_cryp_aes_ctr_decrypt,
	}
},
{
	.cra_name		= "ecb(des)",
	.cra_driver_name	= "stm32-ecb-des",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= DES_KEY_SIZE,
		.max_keysize	= DES_KEY_SIZE,
		.setkey		= stm32_cryp_des_setkey,
		.encrypt	= stm32_cryp_des_ecb_encrypt,
		.decrypt	= stm32_cryp_des_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(des)",
	.cra_driver_name	= "stm32-cbc-des",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= DES_KEY_SIZE,
		.max_keysize	= DES_KEY_SIZE,
		.ivsize		= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_des_setkey,
		.encrypt	= stm32_cryp_des_cbc_encrypt,
		.decrypt	= stm32_cryp_des_cbc_decrypt,
	}
},
{
	.cra_name		= "ecb(des3_ede)",
	.cra_driver_name	= "stm32-ecb-des3",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= 3 * DES_KEY_SIZE,
		.max_keysize	= 3 * DES_KEY_SIZE,
		.setkey		= stm32_cryp_tdes_setkey,
		.encrypt	= stm32_cryp_tdes_ecb_encrypt,
		.decrypt	= stm32_cryp_tdes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(des3_ede)",
	.cra_driver_name	= "stm32-cbc-des3",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= 3 * DES_KEY_SIZE,
		.max_keysize	= 3 * DES_KEY_SIZE,
		.ivsize		= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_tdes_setkey,
		.encrypt	= stm32_cryp_tdes_cbc_encrypt,
		.decrypt	= stm32_cryp_tdes_cbc_decrypt,
	}
},
};

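/*
 * Usage sketch (illustrative, not part of the driver): how a kernel client
 * of this generation's crypto API might exercise the "cbc(aes)" cipher
 * registered above. The function name and the bare NULL completion callback
 * are assumptions made for brevity; a real caller would pass a callback and
 * wait for it when the request returns -EINPROGRESS or -EBUSY.
 */
#if 0
static int stm32_cryp_usage_sketch(void)
{
	static u8 buf[AES_BLOCK_SIZE];	/* static: sg_init_one() wants lowmem */
	u8 key[AES_KEYSIZE_128] = {};
	u8 iv[AES_BLOCK_SIZE] = {};
	struct crypto_ablkcipher *tfm;
	struct ablkcipher_request *req;
	struct scatterlist sg;
	int ret;

	/* "cbc(aes)" resolves to stm32-cbc-aes when it wins on priority */
	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_ablkcipher_setkey(tfm, key, sizeof(key));
	if (ret)
		goto free_tfm;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto free_tfm;
	}

	/* Encrypt one block in place */
	sg_init_one(&sg, buf, sizeof(buf));
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					NULL, NULL);
	ablkcipher_request_set_crypt(req, &sg, &sg, sizeof(buf), iv);

	ret = crypto_ablkcipher_encrypt(req);

	ablkcipher_request_free(req);
free_tfm:
	crypto_free_ablkcipher(tfm);
	return ret;
}
#endif
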
static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", },
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);
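
/*
 * Hypothetical device tree node matched by the table above. The unit
 * address, interrupt number and clock/reset specifiers below are
 * illustrative placeholders, not copied from a real board file:
 *
 *	cryp@50060000 {
 *		compatible = "st,stm32f756-cryp";
 *		reg = <0x50060000 0x400>;
 *		interrupts = <79>;
 *		clocks = <&rcc 0 153>;
 *		resets = <&rcc 79>;
 *	};
 */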

static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct resource *res;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->dev = dev;

	mutex_init(&cryp->lock);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	cryp->regs = devm_ioremap_resource(dev, res);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(dev, "Cannot get IRQ resource\n");
		return irq;
	}

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	rst = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rst)) {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	cryp->engine->prepare_cipher_request = stm32_cryp_prepare_cipher_req;
	cryp->engine->cipher_one_request = stm32_cryp_cipher_one_req;

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	dev_info(dev, "Initialized\n");

	return 0;

err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);

	if (!cryp)
		return -ENODEV;

	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");