// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 * Ux500 support taken from snippets in the old Ux500 cryp driver
 */

#include <crypto/aes.h>
#include <crypto/engine.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/bottom_half.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/iopoll.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/string.h>

#define DRIVER_NAME             "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bits [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
#define FLG_GCM                 BIT(7)
#define FLG_CCM                 BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)
/* Bits [31..16] status */

/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C
#define CRYP_CSGCMCCM0R         0x00000050
#define CRYP_CSGCM0R            0x00000070

#define UX500_CRYP_CR		0x00000000
#define UX500_CRYP_SR		0x00000004
#define UX500_CRYP_DIN		0x00000008
#define UX500_CRYP_DINSIZE	0x0000000C
#define UX500_CRYP_DOUT		0x00000010
#define UX500_CRYP_DOUSIZE	0x00000014
#define UX500_CRYP_DMACR	0x00000018
#define UX500_CRYP_IMSC		0x0000001C
#define UX500_CRYP_RIS		0x00000020
#define UX500_CRYP_MIS		0x00000024
#define UX500_CRYP_K1L		0x00000028
#define UX500_CRYP_K1R		0x0000002C
#define UX500_CRYP_K2L		0x00000030
#define UX500_CRYP_K2R		0x00000034
#define UX500_CRYP_K3L		0x00000038
#define UX500_CRYP_K3R		0x0000003C
#define UX500_CRYP_K4L		0x00000040
#define UX500_CRYP_K4R		0x00000044
#define UX500_CRYP_IV0L		0x00000048
#define UX500_CRYP_IV0R		0x0000004C
#define UX500_CRYP_IV1L		0x00000050
#define UX500_CRYP_IV1R		0x00000054

/* Registers values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038 /* Not on Ux500 */
#define CR_AES_XTS              0x00000038 /* Only on Ux500 */
#define CR_AES_GCM              0x00080000
#define CR_AES_CCM              0x00080008
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_KEYRDEN              0x00000400 /* Only on Ux500 */
#define CR_KSE                  0x00000800 /* Only on Ux500 */
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000
#define CR_PH_INIT              0x00000000
#define CR_PH_HEADER            0x00010000
#define CR_PH_PAYLOAD           0x00020000
#define CR_PH_FINAL             0x00030000
#define CR_PH_MASK              0x00030000
#define CR_NBPBL_SHIFT          20

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)

/* Misc */
#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
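/*
 * In GCM the counter value 1 is reserved for the final tag computation,
 * so payload encryption starts with the 32-bit counter at 2 (NIST SP
 * 800-38D, 96-bit IV case).
 */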
#define GCM_CTR_INIT            2
#define CRYP_AUTOSUSPEND_DELAY	50

struct stm32_cryp_caps {
	bool			aeads_support;
	bool			linear_aes_key;
	bool			kp_mode;
	bool			iv_protection;
	bool			swap_final;
	bool			padding_wa;
	u32			cr;
	u32			sr;
	u32			din;
	u32			dout;
	u32			imsc;
	u32			mis;
	u32			k1l;
	u32			k1r;
	u32			k3r;
	u32			iv0l;
	u32			iv0r;
	u32			iv1l;
	u32			iv1r;
};

struct stm32_cryp_ctx {
	struct stm32_cryp       *cryp;
	int                     keylen;
	__be32                  key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long           flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head        list;
	struct device           *dev;
	void __iomem            *regs;
	struct clk              *clk;
	unsigned long           flags;
	u32                     irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx   *ctx;

	struct crypto_engine    *engine;

	struct skcipher_request *req;
	struct aead_request     *areq;

	size_t                  authsize;
	size_t                  hw_blocksize;

	size_t                  payload_in;
	size_t                  header_in;
	size_t                  payload_out;

	struct scatterlist      *out_sg;

	struct scatter_walk     in_walk;
	struct scatter_walk     out_walk;

	__be32                  last_ctr[4];
	u32                     gcm_ctr;
};

struct stm32_cryp_list {
	struct list_head        dev_list;
	spinlock_t              lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status,
			!(status & SR_BUSY), 10, 100000);
}

static inline void stm32_cryp_enable(struct stm32_cryp *cryp)
{
	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_CRYPEN,
		       cryp->regs + cryp->caps->cr);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->cr, status,
			!(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status,
			status & SR_OFNE, 10, 100000);
}

static inline void stm32_cryp_key_read_enable(struct stm32_cryp *cryp)
{
	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_KEYRDEN,
		       cryp->regs + cryp->caps->cr);
}

static inline void stm32_cryp_key_read_disable(struct stm32_cryp *cryp)
{
	writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) & ~CR_KEYRDEN,
		       cryp->regs + cryp->caps->cr);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, cryp->caps->iv0l, be32_to_cpu(*iv++));
	stm32_cryp_write(cryp, cryp->caps->iv0r, be32_to_cpu(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, cryp->caps->iv1l, be32_to_cpu(*iv++));
		stm32_cryp_write(cryp, cryp->caps->iv1r, be32_to_cpu(*iv++));
	}
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct skcipher_request *req = cryp->req;
	__be32 *tmp = (void *)req->iv;

	if (!tmp)
		return;

	if (cryp->caps->iv_protection)
		stm32_cryp_key_read_enable(cryp);

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));

	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
	}

	if (cryp->caps->iv_protection)
		stm32_cryp_key_read_disable(cryp);
}

/**
 * ux500_swap_bits_in_byte() - mirror the bits in a byte
 * @b: the byte to be mirrored
 *
 * The bits are swapped the following way:
 *  Byte b includes bits 0-7, nibble 1 (n1) includes bits 0-3 and
 *  nibble 2 (n2) bits 4-7.
 *
 *  Nibble 1 (n1):
 *  (The "old" (moved) bit is replaced with a zero)
 *  1. Move bits 6 and 7, 4 positions to the left.
 *  2. Move bits 3 and 5, 2 positions to the left.
 *  3. Move bits 1-4, 1 position to the left.
 *
 *  Nibble 2 (n2):
 *  1. Move bits 0 and 1, 4 positions to the right.
 *  2. Move bits 2 and 4, 2 positions to the right.
 *  3. Move bits 3-6, 1 position to the right.
 *
 *  Combine the two nibbles to a complete and swapped byte.
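 *
 *  For example, 0x2C (00101100b) is mirrored into 0x34 (00110100b).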
 */
static inline u8 ux500_swap_bits_in_byte(u8 b)
{
#define R_SHIFT_4_MASK  0xc0 /* Bits 6 and 7, right shift 4 */
#define R_SHIFT_2_MASK  0x28 /* (After right shift 4) Bits 3 and 5,
				  right shift 2 */
#define R_SHIFT_1_MASK  0x1e /* (After right shift 2) Bits 1-4,
				  right shift 1 */
#define L_SHIFT_4_MASK  0x03 /* Bits 0 and 1, left shift 4 */
#define L_SHIFT_2_MASK  0x14 /* (After left shift 4) Bits 2 and 4,
				  left shift 2 */
#define L_SHIFT_1_MASK  0x78 /* (After left shift 2) Bits 3-6,
				  left shift 1 */

	u8 n1;
	u8 n2;

	/* Swap most significant nibble */
	/* Right shift 4, bits 6 and 7 */
	n1 = ((b  & R_SHIFT_4_MASK) >> 4) | (b  & ~(R_SHIFT_4_MASK >> 4));
	/* Right shift 2, bits 3 and 5 */
	n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
	/* Right shift 1, bits 1-4 */
	n1 = (n1  & R_SHIFT_1_MASK) >> 1;

	/* Swap least significant nibble */
	/* Left shift 4, bits 0 and 1 */
	n2 = ((b  & L_SHIFT_4_MASK) << 4) | (b  & ~(L_SHIFT_4_MASK << 4));
	/* Left shift 2, bits 2 and 4 */
	n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
	/* Left shift 1, bits 3-6 */
	n2 = (n2  & L_SHIFT_1_MASK) << 1;

	return n1 | n2;
}

/**
 * ux500_swizzle_key() - Shuffle around words and bits in the AES key
 * @in: key to swizzle
 * @out: swizzled key
 * @len: length of key, in bytes
 *
 * This "key swizzling procedure" is described in the examples in the
 * DB8500 design specification. There is no real description of why
 * the bits have been arranged like this in the hardware.
 */
static inline void ux500_swizzle_key(const u8 *in, u8 *out, u32 len)
{
	int i = 0;
	int bpw = sizeof(u32);
	int j;
	int index = 0;

	j = len - bpw;
	while (j >= 0) {
		for (i = 0; i < bpw; i++) {
			index = len - j - bpw + i;
			out[j + i] =
				ux500_swap_bits_in_byte(in[index]);
		}
		j -= bpw;
	}
}

static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, c->caps->k1l, be32_to_cpu(c->ctx->key[0]));
		stm32_cryp_write(c, c->caps->k1r, be32_to_cpu(c->ctx->key[1]));
		return;
	}

	/*
	 * On the Ux500 the AES key is considered a single bit sequence
	 * of 128, 192 or 256 bits length. It is written linearly into the
	 * registers from K1L and down, and needs to be processed to become
	 * a proper big-endian bit sequence.
	 */
	if (is_aes(c) && c->caps->linear_aes_key) {
		u32 tmpkey[8];

		ux500_swizzle_key((u8 *)c->ctx->key,
				  (u8 *)tmpkey, c->ctx->keylen);

		r_id = c->caps->k1l;
		for (i = 0; i < c->ctx->keylen / sizeof(u32); i++, r_id += 4)
			stm32_cryp_write(c, r_id, tmpkey[i]);

		return;
	}

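	/*
	 * Standard layout: the key is right-aligned, written from K3R
	 * downwards, so e.g. a 128-bit key ends up in K2L..K3R.
	 */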
	r_id = c->caps->k3r;
	for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
		stm32_cryp_write(c, r_id, be32_to_cpu(c->ctx->key[i - 1]));
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

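/*
 * The crypto API counts the authentication tag in cryptlen for AEAD
 * decryption, so the actual ciphertext length is cryptlen - authsize;
 * for encryption all of cryptlen is input text.
 */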
static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}

static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	__be32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (gcm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	}

	return 0;
}

static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
{
	u32 cfg;
	int err;

	/* Check if whole header written */
	if (!cryp->header_in) {
		/* Wait for completion */
		err = stm32_cryp_wait_busy(cryp);
		if (err) {
			dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
			stm32_cryp_write(cryp, cryp->caps->imsc, 0);
			stm32_cryp_finish_req(cryp, err);
			return;
		}

		if (stm32_cryp_get_input_text_len(cryp)) {
			/* Phase 3 : payload */
			cfg = stm32_cryp_read(cryp, cryp->caps->cr);
			cfg &= ~CR_CRYPEN;
			stm32_cryp_write(cryp, cryp->caps->cr, cfg);

			cfg &= ~CR_PH_MASK;
			cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
			stm32_cryp_write(cryp, cryp->caps->cr, cfg);
		} else {
			/*
			 * Phase 4 : tag.
			 * Nothing to read, nothing to write: the caller has
			 * to end the request.
			 */
		}
	}
}

static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
{
	size_t written;
	size_t len;
	u32 alen = cryp->areq->assoclen;
	u32 block[AES_BLOCK_32] = {0};
	u8 *b8 = (u8 *)block;

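	/*
	 * RFC 3610 a(l) encoding: AAD lengths up to 0xFF00 are encoded in
	 * 2 bytes; larger ones use the 0xFF 0xFE prefix followed by a
	 * 4-byte length.
	 */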
	if (alen <= 65280) {
		/* Write first u32 of B1 */
		b8[0] = (alen >> 8) & 0xFF;
		b8[1] = alen & 0xFF;
		len = 2;
	} else {
		/* Build the first two u32 of B1 */
		b8[0] = 0xFF;
		b8[1] = 0xFE;
		b8[2] = (alen & 0xFF000000) >> 24;
		b8[3] = (alen & 0x00FF0000) >> 16;
		b8[4] = (alen & 0x0000FF00) >> 8;
		b8[5] = alen & 0x000000FF;
		len = 6;
	}

	written = min_t(size_t, AES_BLOCK_SIZE - len, alen);

	scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);

	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
	u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
	__be32 *bd;
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. First, set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

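	/*
	 * B0 flags byte per RFC 3610: bit 6 set when AAD is present,
	 * bits 5..3 carry (authsize - 2) / 2, bits 2..0 already hold
	 * L' = L - 1 copied from the IV; the trailing L bytes carry the
	 * message length.
	 */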
	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;
	bd = (__be32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		u32 xd = d[i];

		if (!cryp->caps->padding_wa)
			xd = be32_to_cpu(bd[i]);
		stm32_cryp_write(cryp, cryp->caps->din, xd);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (ccm init)\n");
		return ret;
	}

	/* Prepare next phase */
	if (cryp->areq->assoclen) {
		cfg |= CR_PH_HEADER | CR_CRYPEN;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);

		/* Write first (special) block (may move to next phase [payload]) */
		stm32_cryp_write_ccm_first_header(cryp);
	} else if (stm32_cryp_get_input_text_len(cryp)) {
		cfg |= CR_PH_PAYLOAD;
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	}

	return 0;
}

static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_get_sync(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

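	/*
	 * ECB/CBC decryption needs the inverse key schedule. The hardware
	 * derives it from the cipher key in a dedicated preparation step
	 * (CR_AES_KP mode, or the KSE bit on Ux500) before the target mode
	 * is programmed.
	 */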
	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		/* Configure in key preparation mode */
		if (cryp->caps->kp_mode)
			stm32_cryp_write(cryp, cryp->caps->cr,
				cfg | CR_AES_KP);
		else
			stm32_cryp_write(cryp,
				cryp->caps->cr, cfg | CR_AES_ECB | CR_KSE);

		/* Set key only after full configuration done */
		stm32_cryp_hw_write_key(cryp);

		/* Start prepare key */
		stm32_cryp_enable(cryp);
		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}

		cfg |= hw_mode | CR_DEC_NOT_ENC;

		/* Apply updated config (Decrypt + algo) and flush */
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);
	} else {
		cfg |= hw_mode;
		if (is_decrypt(cryp))
			cfg |= CR_DEC_NOT_ENC;

		/* Apply config and flush */
		stm32_cryp_write(cryp, cryp->caps->cr, cfg);

		/* Set key only after configuration done */
		stm32_cryp_hw_write_key(cryp);
	}

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
		break;

	default:
		break;
	}

	/* Enable now */
	stm32_cryp_enable(cryp);

	return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
		stm32_cryp_get_iv(cryp);

	pm_runtime_mark_last_busy(cryp->dev);
	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req,
						   err);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, cryp->caps->imsc, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);

static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

	return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

	return 0;
}

static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	return verify_skcipher_des_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	return verify_skcipher_des3_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % AES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	int err;

	err = crypto_ccm_check_iv(req->iv);
	if (err)
		return err;

	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	int err;

	err = crypto_ccm_check_iv(req->iv);
	if (err)
		return err;

	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
{
	if (req->cryptlen % DES_BLOCK_SIZE)
		return -EINVAL;

	if (req->cryptlen == 0)
		return 0;

	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

static int stm32_cryp_prepare_req(struct skcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	struct scatterlist *in_sg;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->header_in = 0;
		cryp->payload_in = req->cryptlen;
		cryp->payload_out = req->cryptlen;
		cryp->authsize = 0;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  = AssocData   ||     PlainText
		 *          <- assoclen ->  <- cryptlen ->
		 *
		 *  OUTPUT = AssocData    ||   CipherText   ||      AuthTag
		 *          <- assoclen ->  <-- cryptlen -->  <- authsize ->
		 *
		 * Decryption case:
		 *  INPUT  =  AssocData     ||    CipherText   ||      AuthTag
		 *          <- assoclen --->  <---------- cryptlen ---------->
		 *
		 *  OUTPUT = AssocData    ||               PlainText
		 *          <- assoclen ->  <- cryptlen - authsize ->
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		if (is_encrypt(cryp)) {
			cryp->payload_in = areq->cryptlen;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = areq->cryptlen;
		} else {
			cryp->payload_in = areq->cryptlen - cryp->authsize;
			cryp->header_in = areq->assoclen;
			cryp->payload_out = cryp->payload_in;
		}
	}

	in_sg = req ? req->src : areq->src;
	scatterwalk_start(&cryp->in_walk, in_sg);

	cryp->out_sg = req ? req->dst : areq->dst;
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		/* In output, jump after assoc data */
		scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
	}

	if (is_ctr(cryp))
		memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));

	ret = stm32_cryp_hw_init(cryp);
	return ret;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq,
						      struct skcipher_request,
						      base);
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_prepare_req(req, NULL) ?:
	       stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;
	int err;

	if (!cryp)
		return -ENODEV;

	err = stm32_cryp_prepare_req(NULL, req);
	if (err)
		return err;

	if (unlikely(!cryp->payload_in && !cryp->header_in)) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit;
	unsigned int i;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
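		/*
		 * The final GHASH block is len(A) || len(C) as two 64-bit
		 * bit counts; the leading zero word written before each
		 * count covers the upper 32 bits, which are always zero
		 * here.
		 */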
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, cryp->caps->din, 0);
		stm32_cryp_write(cryp, cryp->caps->din, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - cryp->authsize;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = (__force u32)cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, cryp->caps->din, 0);
		stm32_cryp_write(cryp, cryp->caps->din, size_bit);
	} else {
		/* CCM: write CTR0 */
		u32 iv32[AES_BLOCK_32];
		u8 *iv = (u8 *)iv32;
		__be32 *biv = (__be32 *)iv32;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			u32 xiv = iv32[i];

			if (!cryp->caps->padding_wa)
				xiv = be32_to_cpu(biv[i]);
			stm32_cryp_write(cryp, cryp->caps->din, xiv);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		u32 out_tag[AES_BLOCK_32];

		/* Get and write tag */
		readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
		scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);
		readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	return ret;
}

static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
		/*
		 * In this case we must increment the counter manually, as
		 * the HW does not handle the u32 carry into the upper words.
		 */
		crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));

		cr = stm32_cryp_read(cryp, cryp->caps->cr);
		stm32_cryp_write(cryp, cryp->caps->cr, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);

		stm32_cryp_write(cryp, cryp->caps->cr, cr);
	}

	/* The IV registers are BE */
	cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
	cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));
	cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
	cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
}

static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32];

	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
				   cryp->payload_out);
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32] = {0};

	scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
							    cryp->payload_in), 0);
	writesl(cryp->regs + cryp->caps->din, block, cryp->hw_blocksize / sizeof(u32));
	cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
}

static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */
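	/*
	 * The peripheral cannot handle a last block shorter than 16 bytes
	 * in GCM mode. Workaround: switch to plain CTR mode to encrypt the
	 * zero-padded final block, then switch back to GCM and replay the
	 * padded result through the final phase so the tag computation
	 * stays correct.
	 */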

	/* a) disable ip */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, cryp->caps->iv1r, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm last data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we want to keep
	 * the block value for the final phase below.
	 */
	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
				   cryp->payload_out);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* f) write padded data */
	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, cryp->caps->dout);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}

static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg;

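	/*
	 * NPBLB tells newer IP revisions how many padding bytes sit in the
	 * last block, so the hardware itself excludes them from the GCM/CCM
	 * computation and no software workaround is needed.
	 */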
	/* Disable ip, set NPBLB and re-enable ip */
	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);
}

static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
	u32 block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */
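	/*
	 * CCM counterpart of the GCM workaround: decrypt the short last
	 * block in CTR mode, then re-inject it in header phase XOR-ed with
	 * the context snapshots (CRYP_CSGCMCCMxR) taken before and after,
	 * which compensates the MAC computation for the zero padding.
	 */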

	/* a) disable ip */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);

	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, cryp->caps->iv1r, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we want to keep
	 * the block value for the header phase below.
	 */
	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(block); i++) {
		block[i] ^= cstmp1[i];
		block[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, cryp->caps->din, block[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->payload_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa) {
			/* Special case 1 */
			stm32_cryp_irq_write_gcm_padded_data(cryp);
			return;
		}

		/* Set the number of padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa) {
			/* Special case 2 */
			stm32_cryp_irq_write_ccm_padded_data(cryp);
			return;
		}

		/* Set the number of padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32] = {0};
	size_t written;

	written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);

	scatterwalk_copychunks(block, &cryp->in_walk, written, 0);

	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

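/*
 * The hard IRQ handler stm32_cryp_irq() only latches the masked
 * interrupt status; all FIFO filling and draining happens in this
 * threaded handler.
 */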
stm32_cryp_irq_thread(int irq,void * arg)1635 static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
1636 {
1637 	struct stm32_cryp *cryp = arg;
1638 	u32 ph;
1639 	u32 it_mask = stm32_cryp_read(cryp, cryp->caps->imsc);
1640 
1641 	if (cryp->irq_status & MISR_OUT)
1642 		/* Output FIFO IRQ: read data */
1643 		stm32_cryp_irq_read_data(cryp);
1644 
1645 	if (cryp->irq_status & MISR_IN) {
1646 		if (is_gcm(cryp) || is_ccm(cryp)) {
1647 			ph = stm32_cryp_read(cryp, cryp->caps->cr) & CR_PH_MASK;
1648 			if (unlikely(ph == CR_PH_HEADER))
1649 				/* Write Header */
1650 				stm32_cryp_irq_write_gcmccm_header(cryp);
1651 			else
1652 				/* Input FIFO IRQ: write data */
1653 				stm32_cryp_irq_write_data(cryp);
1654 			if (is_gcm(cryp))
1655 				cryp->gcm_ctr++;
1656 		} else {
1657 			/* Input FIFO IRQ: write data */
1658 			stm32_cryp_irq_write_data(cryp);
1659 		}
1660 	}
1661 
1662 	/* Mask useless interrupts */
1663 	if (!cryp->payload_in && !cryp->header_in)
1664 		it_mask &= ~IMSCR_IN;
1665 	if (!cryp->payload_out)
1666 		it_mask &= ~IMSCR_OUT;
1667 	stm32_cryp_write(cryp, cryp->caps->imsc, it_mask);
1668 
1669 	if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out) {
1670 		local_bh_disable();
1671 		stm32_cryp_finish_req(cryp, 0);
1672 		local_bh_enable();
1673 	}
1674 
1675 	return IRQ_HANDLED;
1676 }
1677 
stm32_cryp_irq(int irq,void * arg)1678 static irqreturn_t stm32_cryp_irq(int irq, void *arg)
1679 {
1680 	struct stm32_cryp *cryp = arg;
1681 
1682 	cryp->irq_status = stm32_cryp_read(cryp, cryp->caps->mis);
1683 
1684 	return IRQ_WAKE_THREAD;
1685 }
1686 
static struct skcipher_engine_alg crypto_algs[] = {
{
	.base = {
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "stm32-ecb-aes",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_ASYNC,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.setkey			= stm32_cryp_aes_setkey,
		.encrypt		= stm32_cryp_aes_ecb_encrypt,
		.decrypt		= stm32_cryp_aes_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "stm32-cbc-aes",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_ASYNC,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= stm32_cryp_aes_setkey,
		.encrypt		= stm32_cryp_aes_cbc_encrypt,
		.decrypt		= stm32_cryp_aes_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name		= "ctr(aes)",
		.base.cra_driver_name	= "stm32-ctr-aes",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_ASYNC,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= stm32_cryp_aes_setkey,
		.encrypt		= stm32_cryp_aes_ctr_encrypt,
		.decrypt		= stm32_cryp_aes_ctr_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name		= "ecb(des)",
		.base.cra_driver_name	= "stm32-ecb-des",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_ASYNC,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= DES_BLOCK_SIZE,
		.max_keysize		= DES_BLOCK_SIZE,
		.setkey			= stm32_cryp_des_setkey,
		.encrypt		= stm32_cryp_des_ecb_encrypt,
		.decrypt		= stm32_cryp_des_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name		= "cbc(des)",
		.base.cra_driver_name	= "stm32-cbc-des",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_ASYNC,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= DES_BLOCK_SIZE,
		.max_keysize		= DES_BLOCK_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
		.setkey			= stm32_cryp_des_setkey,
		.encrypt		= stm32_cryp_des_cbc_encrypt,
		.decrypt		= stm32_cryp_des_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name		= "ecb(des3_ede)",
		.base.cra_driver_name	= "stm32-ecb-des3",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_ASYNC,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= 3 * DES_BLOCK_SIZE,
		.max_keysize		= 3 * DES_BLOCK_SIZE,
		.setkey			= stm32_cryp_tdes_setkey,
		.encrypt		= stm32_cryp_tdes_ecb_encrypt,
		.decrypt		= stm32_cryp_tdes_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name		= "cbc(des3_ede)",
		.base.cra_driver_name	= "stm32-cbc-des3",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_ASYNC,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask	= 0,
		.base.cra_module	= THIS_MODULE,

		.init			= stm32_cryp_init_tfm,
		.min_keysize		= 3 * DES_BLOCK_SIZE,
		.max_keysize		= 3 * DES_BLOCK_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
		.setkey			= stm32_cryp_tdes_setkey,
		.encrypt		= stm32_cryp_tdes_cbc_encrypt,
		.decrypt		= stm32_cryp_tdes_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
};

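/*
 * AEAD algorithms (AES-GCM and AES-CCM), registered only when the
 * instance supports them (caps->aeads_support).
 */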
static struct aead_engine_alg aead_algs[] = {
{
	.base.setkey		= stm32_cryp_aes_aead_setkey,
	.base.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
	.base.encrypt		= stm32_cryp_aes_gcm_encrypt,
	.base.decrypt		= stm32_cryp_aes_gcm_decrypt,
	.base.init		= stm32_cryp_aes_aead_init,
	.base.ivsize		= 12,
	.base.maxauthsize	= AES_BLOCK_SIZE,

	.base.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "stm32-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = stm32_cryp_aead_one_req,
	},
},
{
	.base.setkey		= stm32_cryp_aes_aead_setkey,
	.base.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
	.base.encrypt		= stm32_cryp_aes_ccm_encrypt,
	.base.decrypt		= stm32_cryp_aes_ccm_decrypt,
	.base.init		= stm32_cryp_aes_aead_init,
	.base.ivsize		= AES_BLOCK_SIZE,
	.base.maxauthsize	= AES_BLOCK_SIZE,

	.base.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "stm32-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0,
		.cra_module		= THIS_MODULE,
	},
	.op = {
		.do_one_request = stm32_cryp_aead_one_req,
	},
},
};

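/*
 * Per-variant capabilities: feature flags (AEAD support, key loading and
 * IV handling quirks, padding workaround) plus the register offsets,
 * which differ between the Ux500 and STM32 versions of the IP.
 */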
static const struct stm32_cryp_caps ux500_data = {
	.aeads_support = false,
	.linear_aes_key = true,
	.kp_mode = false,
	.iv_protection = true,
	.swap_final = true,
	.padding_wa = true,
	.cr = UX500_CRYP_CR,
	.sr = UX500_CRYP_SR,
	.din = UX500_CRYP_DIN,
	.dout = UX500_CRYP_DOUT,
	.imsc = UX500_CRYP_IMSC,
	.mis = UX500_CRYP_MIS,
	.k1l = UX500_CRYP_K1L,
	.k1r = UX500_CRYP_K1R,
	.k3r = UX500_CRYP_K3R,
	.iv0l = UX500_CRYP_IV0L,
	.iv0r = UX500_CRYP_IV0R,
	.iv1l = UX500_CRYP_IV1L,
	.iv1r = UX500_CRYP_IV1R,
};

static const struct stm32_cryp_caps f7_data = {
	.aeads_support = true,
	.linear_aes_key = false,
	.kp_mode = true,
	.iv_protection = false,
	.swap_final = true,
	.padding_wa = true,
	.cr = CRYP_CR,
	.sr = CRYP_SR,
	.din = CRYP_DIN,
	.dout = CRYP_DOUT,
	.imsc = CRYP_IMSCR,
	.mis = CRYP_MISR,
	.k1l = CRYP_K1LR,
	.k1r = CRYP_K1RR,
	.k3r = CRYP_K3RR,
	.iv0l = CRYP_IV0LR,
	.iv0r = CRYP_IV0RR,
	.iv1l = CRYP_IV1LR,
	.iv1r = CRYP_IV1RR,
};

static const struct stm32_cryp_caps mp1_data = {
	.aeads_support = true,
	.linear_aes_key = false,
	.kp_mode = true,
	.iv_protection = false,
	.swap_final = false,
	.padding_wa = false,
	.cr = CRYP_CR,
	.sr = CRYP_SR,
	.din = CRYP_DIN,
	.dout = CRYP_DOUT,
	.imsc = CRYP_IMSCR,
	.mis = CRYP_MISR,
	.k1l = CRYP_K1LR,
	.k1r = CRYP_K1RR,
	.k3r = CRYP_K3RR,
	.iv0l = CRYP_IV0LR,
	.iv0r = CRYP_IV0RR,
	.iv1l = CRYP_IV1LR,
	.iv1r = CRYP_IV1RR,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "stericsson,ux500-cryp", .data = &ux500_data},
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

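/*
 * Probe: map the registers, request the threaded IRQ, enable the clock,
 * pulse the optional reset line, then start a crypto engine and register
 * the supported algorithms with it.
 */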
static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk))
		return dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n");

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

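	/*
	 * Enable runtime PM with the usage count raised so the device stays
	 * powered while the algorithms are being registered; the matching
	 * pm_runtime_put_sync() is done at the end of probe.
	 */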
	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

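	/*
	 * Reset the hardware if a reset line is provided; only probe
	 * deferral is treated as fatal here.
	 */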
	rst = devm_reset_control_get(dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_rst;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_engine_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	if (cryp->caps->aeads_support) {
		ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
		if (ret)
			goto err_aead_algs;
	}

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);
err_rst:
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

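/*
 * Remove: unregister the algorithms, stop the crypto engine and drop the
 * runtime PM and clock references taken at probe time.
 */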
static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = pm_runtime_resume_and_get(cryp->dev);
	if (ret < 0)
		return ret;

	if (cryp->caps->aeads_support)
		crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

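/* Runtime PM only gates the CRYP peripheral clock */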
#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

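/* System sleep reuses the runtime PM handlers through the force helpers */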
static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.pm		= &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");