// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-core.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2015-2019 Corentin Labbe <clabbe.montjoie@gmail.com>
 *
 * Core file which registers crypto algorithms supported by the SecuritySystem
 *
 * You can find a link to the datasheet in Documentation/arm/sunxi.rst
 */
#include <linux/clk.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irq.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <crypto/internal/rng.h>
#include <crypto/internal/skcipher.h>

#include "sun8i-ss.h"

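/*
 * The A80 (sun9i) variant only exposes the cipher side of the engine, so all
 * hash IDs are marked unsupported; the A83T adds MD5/SHA1/SHA224/SHA256.
 * Both variants list 300 MHz as the recommended maximum for the bus and
 * module clocks.
 */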
static const struct ss_variant ss_a80_variant = {
	.alg_cipher = { SS_ALG_AES, SS_ALG_DES, SS_ALG_3DES,
	},
	.alg_hash = { SS_ID_NOTSUPP, SS_ID_NOTSUPP, SS_ID_NOTSUPP, SS_ID_NOTSUPP,
	},
	.op_mode = { SS_OP_ECB, SS_OP_CBC,
	},
	.ss_clks = {
		{ "bus", 0, 300 * 1000 * 1000 },
		{ "mod", 0, 300 * 1000 * 1000 },
	}
};

static const struct ss_variant ss_a83t_variant = {
	.alg_cipher = { SS_ALG_AES, SS_ALG_DES, SS_ALG_3DES,
	},
	.alg_hash = { SS_ALG_MD5, SS_ALG_SHA1, SS_ALG_SHA224, SS_ALG_SHA256,
	},
	.op_mode = { SS_OP_ECB, SS_OP_CBC,
	},
	.ss_clks = {
		{ "bus", 0, 300 * 1000 * 1000 },
		{ "mod", 0, 300 * 1000 * 1000 },
	}
};

/*
 * sun8i_ss_get_engine_number() - get the next channel slot
 * This is a simple round-robin way of picking the next channel.
 */
int sun8i_ss_get_engine_number(struct sun8i_ss_dev *ss)
{
	return atomic_inc_return(&ss->flow) % MAXFLOW;
}

int sun8i_ss_run_task(struct sun8i_ss_dev *ss, struct sun8i_cipher_req_ctx *rctx,
		      const char *name)
{
	int flow = rctx->flow;
	unsigned int ivlen = rctx->ivlen;
	u32 v = SS_START;
	int i;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	ss->flows[flow].stat_req++;
#endif

	/* choose between stream0/stream1 */
	if (flow)
		v |= SS_FLOW1;
	else
		v |= SS_FLOW0;

	v |= rctx->op_mode;
	v |= rctx->method;

	if (rctx->op_dir)
		v |= SS_DECRYPTION;

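	/*
	 * Only AES carries a key-size field: SS_AES_xxxBITS is shifted into
	 * bit 7 and up of the control word. DES/3DES key lengths fall through
	 * and leave that field untouched.
	 */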
	switch (rctx->keylen) {
	case 128 / 8:
		v |= SS_AES_128BITS << 7;
		break;
	case 192 / 8:
		v |= SS_AES_192BITS << 7;
		break;
	case 256 / 8:
		v |= SS_AES_256BITS << 7;
		break;
	}

	for (i = 0; i < MAX_SG; i++) {
		if (!rctx->t_dst[i].addr)
			break;

		mutex_lock(&ss->mlock);
		writel(rctx->p_key, ss->base + SS_KEY_ADR_REG);

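		/*
		 * The hardware fetches the IV from the address written to
		 * SS_IV_ADR_REG. On encryption, chunks after the first chain
		 * from the last ciphertext block of the previous destination
		 * buffer (t_dst[].len is in 32-bit words, hence the * 4); on
		 * decryption, a backed-up IV is provided for each chunk in
		 * p_iv[].
		 */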
		if (ivlen) {
			if (rctx->op_dir == SS_ENCRYPTION) {
				if (i == 0)
					writel(rctx->p_iv[0], ss->base + SS_IV_ADR_REG);
				else
					writel(rctx->t_dst[i - 1].addr + rctx->t_dst[i - 1].len * 4 - ivlen, ss->base + SS_IV_ADR_REG);
			} else {
				writel(rctx->p_iv[i], ss->base + SS_IV_ADR_REG);
			}
		}

		dev_dbg(ss->dev,
			"Processing SG %d on flow %d %s ctl=%x %d to %d method=%x opmode=%x opdir=%x srclen=%d\n",
			i, flow, name, v,
			rctx->t_src[i].len, rctx->t_dst[i].len,
			rctx->method, rctx->op_mode,
			rctx->op_dir, rctx->t_src[i].len);

		writel(rctx->t_src[i].addr, ss->base + SS_SRC_ADR_REG);
		writel(rctx->t_dst[i].addr, ss->base + SS_DST_ADR_REG);
		writel(rctx->t_src[i].len, ss->base + SS_LEN_ADR_REG);

		reinit_completion(&ss->flows[flow].complete);
		ss->flows[flow].status = 0;
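		/* be sure all data is written before enabling the task */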
		wmb();

		writel(v, ss->base + SS_CTL_REG);
		mutex_unlock(&ss->mlock);
		wait_for_completion_interruptible_timeout(&ss->flows[flow].complete,
							  msecs_to_jiffies(2000));
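		/*
		 * flows[].status is only set from the IRQ handler, so a zero
		 * value here means the wait timed out or was interrupted.
		 */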
		if (ss->flows[flow].status == 0) {
			dev_err(ss->dev, "DMA timeout for %s\n", name);
			return -EFAULT;
		}
	}

	return 0;
}

static irqreturn_t ss_irq_handler(int irq, void *data)
{
	struct sun8i_ss_dev *ss = (struct sun8i_ss_dev *)data;
	int flow = 0;
	u32 p;

	p = readl(ss->base + SS_INT_STA_REG);
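	/*
	 * One pending bit per flow: write it back to clear it, then wake up
	 * the waiter of that flow.
	 */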
	for (flow = 0; flow < MAXFLOW; flow++) {
		if (p & (BIT(flow))) {
			writel(BIT(flow), ss->base + SS_INT_STA_REG);
			ss->flows[flow].status = 1;
			complete(&ss->flows[flow].complete);
		}
	}

	return IRQ_HANDLED;
}

static struct sun8i_ss_alg_template ss_algs[] = {
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_AES,
	.ss_blockmode = SS_ID_OP_CBC,
	.alg.skcipher = {
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= sun8i_ss_aes_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	}
},
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_AES,
	.ss_blockmode = SS_ID_OP_ECB,
	.alg.skcipher = {
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= sun8i_ss_aes_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	}
},
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_DES3,
	.ss_blockmode = SS_ID_OP_CBC,
	.alg.skcipher = {
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "cbc-des3-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.setkey		= sun8i_ss_des3_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	}
},
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_DES3,
	.ss_blockmode = SS_ID_OP_ECB,
	.alg.skcipher = {
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "ecb-des3-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.setkey		= sun8i_ss_des3_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	}
},
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_PRNG
{
	.type = CRYPTO_ALG_TYPE_RNG,
	.alg.rng = {
		.base = {
			.cra_name		= "stdrng",
			.cra_driver_name	= "sun8i-ss-prng",
			.cra_priority		= 300,
			.cra_ctxsize = sizeof(struct sun8i_ss_rng_tfm_ctx),
			.cra_module		= THIS_MODULE,
			.cra_init		= sun8i_ss_prng_init,
			.cra_exit		= sun8i_ss_prng_exit,
		},
		.generate               = sun8i_ss_prng_generate,
		.seed                   = sun8i_ss_prng_seed,
		.seedsize               = PRNG_SEED_SIZE,
	}
},
#endif
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_HASH
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_MD5,
	.alg.hash = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct md5_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "md5-sun8i-ss",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ss_hash_crainit,
				.cra_exit = sun8i_ss_hash_craexit,
			}
		}
	}
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA1,
	.alg.hash = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-sun8i-ss",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ss_hash_crainit,
				.cra_exit = sun8i_ss_hash_craexit,
			}
		}
	}
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA224,
	.alg.hash = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-sun8i-ss",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ss_hash_crainit,
				.cra_exit = sun8i_ss_hash_craexit,
			}
		}
	}
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA256,
	.alg.hash = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-sun8i-ss",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ss_hash_crainit,
				.cra_exit = sun8i_ss_hash_craexit,
			}
		}
	}
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA1,
	.alg.hash = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.setkey = sun8i_ss_hmac_setkey,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-sun8i-ss",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
				.cra_init = sun8i_ss_hash_crainit,
				.cra_exit = sun8i_ss_hash_craexit,
			}
		}
	}
},
#endif
};

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
static int sun8i_ss_debugfs_show(struct seq_file *seq, void *v)
{
	struct sun8i_ss_dev *ss = seq->private;
	unsigned int i;

	for (i = 0; i < MAXFLOW; i++)
		seq_printf(seq, "Channel %d: nreq %lu\n", i, ss->flows[i].stat_req);

	for (i = 0; i < ARRAY_SIZE(ss_algs); i++) {
		if (!ss_algs[i].ss)
			continue;
		switch (ss_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ss_algs[i].alg.skcipher.base.cra_driver_name,
				   ss_algs[i].alg.skcipher.base.cra_name,
				   ss_algs[i].stat_req, ss_algs[i].stat_fb);

			seq_printf(seq, "\tLast fallback is: %s\n",
				   ss_algs[i].fbname);
			seq_printf(seq, "\tFallback due to length: %lu\n",
				   ss_algs[i].stat_fb_len);
			seq_printf(seq, "\tFallback due to SG length: %lu\n",
				   ss_algs[i].stat_fb_sglen);
			seq_printf(seq, "\tFallback due to alignment: %lu\n",
				   ss_algs[i].stat_fb_align);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ss_algs[i].stat_fb_sgnum);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			seq_printf(seq, "%s %s reqs=%lu tsize=%lu\n",
				   ss_algs[i].alg.rng.base.cra_driver_name,
				   ss_algs[i].alg.rng.base.cra_name,
				   ss_algs[i].stat_req, ss_algs[i].stat_bytes);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ss_algs[i].alg.hash.halg.base.cra_driver_name,
				   ss_algs[i].alg.hash.halg.base.cra_name,
				   ss_algs[i].stat_req, ss_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ss_algs[i].fbname);
			seq_printf(seq, "\tFallback due to length: %lu\n",
				   ss_algs[i].stat_fb_len);
			seq_printf(seq, "\tFallback due to SG length: %lu\n",
				   ss_algs[i].stat_fb_sglen);
			seq_printf(seq, "\tFallback due to alignment: %lu\n",
				   ss_algs[i].stat_fb_align);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ss_algs[i].stat_fb_sgnum);
			break;
		}
	}
	return 0;
}

DEFINE_SHOW_ATTRIBUTE(sun8i_ss_debugfs);
#endif

static void sun8i_ss_free_flows(struct sun8i_ss_dev *ss, int i)
{
	while (i >= 0) {
		crypto_engine_exit(ss->flows[i].engine);
		i--;
	}
}

/*
 * Allocate the flow list structure
 */
static int allocate_flows(struct sun8i_ss_dev *ss)
{
	int i, j, err;

	ss->flows = devm_kcalloc(ss->dev, MAXFLOW, sizeof(struct sun8i_ss_flow),
				 GFP_KERNEL);
	if (!ss->flows)
		return -ENOMEM;

	for (i = 0; i < MAXFLOW; i++) {
		init_completion(&ss->flows[i].complete);

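		/* backup IV and per-SG IV staging buffers used by the cipher path */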
		ss->flows[i].biv = devm_kmalloc(ss->dev, AES_BLOCK_SIZE,
						GFP_KERNEL);
		if (!ss->flows[i].biv) {
			err = -ENOMEM;
			goto error_engine;
		}

		for (j = 0; j < MAX_SG; j++) {
			ss->flows[i].iv[j] = devm_kmalloc(ss->dev, AES_BLOCK_SIZE,
							  GFP_KERNEL);
			if (!ss->flows[i].iv[j]) {
				err = -ENOMEM;
				goto error_engine;
			}
		}

		/* the padding could be up to two blocks. */
		ss->flows[i].pad = devm_kmalloc(ss->dev, MAX_PAD_SIZE,
						GFP_KERNEL);
		if (!ss->flows[i].pad) {
			err = -ENOMEM;
			goto error_engine;
		}
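		/* hash result buffer, sized to at least the DMA cache alignment */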
		ss->flows[i].result =
			devm_kmalloc(ss->dev, max(SHA256_DIGEST_SIZE,
						  dma_get_cache_alignment()),
				     GFP_KERNEL);
		if (!ss->flows[i].result) {
			err = -ENOMEM;
			goto error_engine;
		}

		ss->flows[i].engine = crypto_engine_alloc_init(ss->dev, true);
		if (!ss->flows[i].engine) {
			dev_err(ss->dev, "Cannot allocate engine\n");
			i--;
			err = -ENOMEM;
			goto error_engine;
		}
		err = crypto_engine_start(ss->flows[i].engine);
		if (err) {
			dev_err(ss->dev, "Cannot start engine\n");
			goto error_engine;
		}
	}
	return 0;
error_engine:
	sun8i_ss_free_flows(ss, i);
	return err;
}

/*
 * Power management strategy: the device stays suspended unless a TFM exists
 * for one of the algorithms provided by this driver.
 */
static int sun8i_ss_pm_suspend(struct device *dev)
{
	struct sun8i_ss_dev *ss = dev_get_drvdata(dev);
	int i;

	reset_control_assert(ss->reset);
	for (i = 0; i < SS_MAX_CLOCKS; i++)
		clk_disable_unprepare(ss->ssclks[i]);
	return 0;
}

static int sun8i_ss_pm_resume(struct device *dev)
{
	struct sun8i_ss_dev *ss = dev_get_drvdata(dev);
	int err, i;

	for (i = 0; i < SS_MAX_CLOCKS; i++) {
		if (!ss->variant->ss_clks[i].name)
			continue;
		err = clk_prepare_enable(ss->ssclks[i]);
		if (err) {
			dev_err(ss->dev, "Cannot prepare_enable %s\n",
				ss->variant->ss_clks[i].name);
			goto error;
		}
	}
	err = reset_control_deassert(ss->reset);
	if (err) {
		dev_err(ss->dev, "Cannot deassert reset control\n");
		goto error;
	}
	/* enable interrupts for all flows */
	writel(BIT(0) | BIT(1), ss->base + SS_INT_CTL_REG);

	return 0;
error:
	sun8i_ss_pm_suspend(dev);
	return err;
}

static const struct dev_pm_ops sun8i_ss_pm_ops = {
	SET_RUNTIME_PM_OPS(sun8i_ss_pm_suspend, sun8i_ss_pm_resume, NULL)
};

static int sun8i_ss_pm_init(struct sun8i_ss_dev *ss)
{
	int err;

	pm_runtime_use_autosuspend(ss->dev);
	pm_runtime_set_autosuspend_delay(ss->dev, 2000);

	err = pm_runtime_set_suspended(ss->dev);
	if (err)
		return err;
	pm_runtime_enable(ss->dev);
	return err;
}

static void sun8i_ss_pm_exit(struct sun8i_ss_dev *ss)
{
	pm_runtime_disable(ss->dev);
}

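/*
 * Register every algorithm in ss_algs[] that the current variant supports.
 * Entries the hardware cannot handle get their ->ss pointer cleared so that
 * the debugfs and unregister paths skip them.
 */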
static int sun8i_ss_register_algs(struct sun8i_ss_dev *ss)
{
	int ss_method, err, id;
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ss_algs); i++) {
		ss_algs[i].ss = ss;
		switch (ss_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			id = ss_algs[i].ss_algo_id;
			ss_method = ss->variant->alg_cipher[id];
			if (ss_method == SS_ID_NOTSUPP) {
				dev_info(ss->dev,
					 "DEBUG: Algo of %s not supported\n",
					 ss_algs[i].alg.skcipher.base.cra_name);
				ss_algs[i].ss = NULL;
				break;
			}
			id = ss_algs[i].ss_blockmode;
			ss_method = ss->variant->op_mode[id];
			if (ss_method == SS_ID_NOTSUPP) {
				dev_info(ss->dev, "DEBUG: Blockmode of %s not supported\n",
					 ss_algs[i].alg.skcipher.base.cra_name);
				ss_algs[i].ss = NULL;
				break;
			}
			dev_info(ss->dev, "DEBUG: Register %s\n",
				 ss_algs[i].alg.skcipher.base.cra_name);
			err = crypto_register_skcipher(&ss_algs[i].alg.skcipher);
			if (err) {
				dev_err(ss->dev, "Fail to register %s\n",
					ss_algs[i].alg.skcipher.base.cra_name);
				ss_algs[i].ss = NULL;
				return err;
			}
			break;
		case CRYPTO_ALG_TYPE_RNG:
			err = crypto_register_rng(&ss_algs[i].alg.rng);
			if (err) {
				dev_err(ss->dev, "Fail to register %s\n",
					ss_algs[i].alg.rng.base.cra_name);
				ss_algs[i].ss = NULL;
			}
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			id = ss_algs[i].ss_algo_id;
			ss_method = ss->variant->alg_hash[id];
			if (ss_method == SS_ID_NOTSUPP) {
				dev_info(ss->dev,
					"DEBUG: Algo of %s not supported\n",
					ss_algs[i].alg.hash.halg.base.cra_name);
				ss_algs[i].ss = NULL;
				break;
			}
			dev_info(ss->dev, "Register %s\n",
				 ss_algs[i].alg.hash.halg.base.cra_name);
			err = crypto_register_ahash(&ss_algs[i].alg.hash);
			if (err) {
				dev_err(ss->dev, "ERROR: Fail to register %s\n",
					ss_algs[i].alg.hash.halg.base.cra_name);
				ss_algs[i].ss = NULL;
				return err;
			}
			break;
		default:
			ss_algs[i].ss = NULL;
			dev_err(ss->dev, "ERROR: tried to register an unknown algo\n");
		}
	}
	return 0;
}

static void sun8i_ss_unregister_algs(struct sun8i_ss_dev *ss)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ss_algs); i++) {
		if (!ss_algs[i].ss)
			continue;
		switch (ss_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			dev_info(ss->dev, "Unregister %d %s\n", i,
				 ss_algs[i].alg.skcipher.base.cra_name);
			crypto_unregister_skcipher(&ss_algs[i].alg.skcipher);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			dev_info(ss->dev, "Unregister %d %s\n", i,
				 ss_algs[i].alg.rng.base.cra_name);
			crypto_unregister_rng(&ss_algs[i].alg.rng);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			dev_info(ss->dev, "Unregister %d %s\n", i,
				 ss_algs[i].alg.hash.halg.base.cra_name);
			crypto_unregister_ahash(&ss_algs[i].alg.hash);
			break;
		}
	}
}

static int sun8i_ss_get_clks(struct sun8i_ss_dev *ss)
{
	unsigned long cr;
	int err, i;

	for (i = 0; i < SS_MAX_CLOCKS; i++) {
		if (!ss->variant->ss_clks[i].name)
			continue;
		ss->ssclks[i] = devm_clk_get(ss->dev, ss->variant->ss_clks[i].name);
		if (IS_ERR(ss->ssclks[i])) {
			err = PTR_ERR(ss->ssclks[i]);
			dev_err(ss->dev, "Cannot get %s SS clock err=%d\n",
				ss->variant->ss_clks[i].name, err);
			return err;
		}
		cr = clk_get_rate(ss->ssclks[i]);
		if (!cr)
			return -EINVAL;
		if (ss->variant->ss_clks[i].freq > 0 &&
		    cr != ss->variant->ss_clks[i].freq) {
			dev_info(ss->dev, "Set %s clock to %lu (%lu Mhz) from %lu (%lu Mhz)\n",
				 ss->variant->ss_clks[i].name,
				 ss->variant->ss_clks[i].freq,
				 ss->variant->ss_clks[i].freq / 1000000,
				 cr, cr / 1000000);
			err = clk_set_rate(ss->ssclks[i], ss->variant->ss_clks[i].freq);
			if (err)
				dev_err(ss->dev, "Fail to set %s clk speed to %lu hz\n",
					ss->variant->ss_clks[i].name,
					ss->variant->ss_clks[i].freq);
		}
		if (ss->variant->ss_clks[i].max_freq > 0 &&
		    cr > ss->variant->ss_clks[i].max_freq)
			dev_warn(ss->dev, "Frequency for %s (%lu hz) is higher than datasheet's recommendation (%lu hz)",
				 ss->variant->ss_clks[i].name, cr,
				 ss->variant->ss_clks[i].max_freq);
	}
	return 0;
}

static int sun8i_ss_probe(struct platform_device *pdev)
{
	struct sun8i_ss_dev *ss;
	int err, irq;
	u32 v;

	ss = devm_kzalloc(&pdev->dev, sizeof(*ss), GFP_KERNEL);
	if (!ss)
		return -ENOMEM;

	ss->dev = &pdev->dev;
	platform_set_drvdata(pdev, ss);

	ss->variant = of_device_get_match_data(&pdev->dev);
	if (!ss->variant) {
		dev_err(&pdev->dev, "Missing Crypto Engine variant\n");
		return -EINVAL;
	}

	ss->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(ss->base))
		return PTR_ERR(ss->base);

	err = sun8i_ss_get_clks(ss);
	if (err)
		return err;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ss->reset = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(ss->reset))
		return dev_err_probe(&pdev->dev, PTR_ERR(ss->reset),
				     "No reset control found\n");

	mutex_init(&ss->mlock);

	err = allocate_flows(ss);
	if (err)
		return err;

	err = sun8i_ss_pm_init(ss);
	if (err)
		goto error_pm;

	err = devm_request_irq(&pdev->dev, irq, ss_irq_handler, 0, "sun8i-ss", ss);
	if (err) {
		dev_err(ss->dev, "Cannot request SecuritySystem IRQ (err=%d)\n", err);
		goto error_irq;
	}

	err = sun8i_ss_register_algs(ss);
	if (err)
		goto error_alg;

	err = pm_runtime_resume_and_get(ss->dev);
	if (err < 0)
		goto error_alg;

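	/* the upper bits of SS_CTL_REG report the die ID */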
	v = readl(ss->base + SS_CTL_REG);
	v >>= SS_DIE_ID_SHIFT;
	v &= SS_DIE_ID_MASK;
	dev_info(&pdev->dev, "Security System Die ID %x\n", v);

	pm_runtime_put_sync(ss->dev);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	/* Ignore errors from debugfs */
	ss->dbgfs_dir = debugfs_create_dir("sun8i-ss", NULL);
	ss->dbgfs_stats = debugfs_create_file("stats", 0444,
					      ss->dbgfs_dir, ss,
					      &sun8i_ss_debugfs_fops);
#endif

	return 0;
error_alg:
	sun8i_ss_unregister_algs(ss);
error_irq:
	sun8i_ss_pm_exit(ss);
error_pm:
	sun8i_ss_free_flows(ss, MAXFLOW - 1);
	return err;
}

static int sun8i_ss_remove(struct platform_device *pdev)
{
	struct sun8i_ss_dev *ss = platform_get_drvdata(pdev);

	sun8i_ss_unregister_algs(ss);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	debugfs_remove_recursive(ss->dbgfs_dir);
#endif

	sun8i_ss_free_flows(ss, MAXFLOW - 1);

	sun8i_ss_pm_exit(ss);

	return 0;
}

static const struct of_device_id sun8i_ss_crypto_of_match_table[] = {
	{ .compatible = "allwinner,sun8i-a83t-crypto",
	  .data = &ss_a83t_variant },
	{ .compatible = "allwinner,sun9i-a80-crypto",
	  .data = &ss_a80_variant },
	{}
};
MODULE_DEVICE_TABLE(of, sun8i_ss_crypto_of_match_table);

static struct platform_driver sun8i_ss_driver = {
	.probe		 = sun8i_ss_probe,
	.remove		 = sun8i_ss_remove,
	.driver		 = {
		.name		= "sun8i-ss",
		.pm             = &sun8i_ss_pm_ops,
		.of_match_table	= sun8i_ss_crypto_of_match_table,
	},
};

module_platform_driver(sun8i_ss_driver);

MODULE_DESCRIPTION("Allwinner SecuritySystem cryptographic offloader");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Corentin Labbe <clabbe.montjoie@gmail.com>");