1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * sun8i-ss-core.c - hardware cryptographic offloader for
4  * Allwinner A80/A83T SoC
5  *
6  * Copyright (C) 2015-2019 Corentin Labbe <clabbe.montjoie@gmail.com>
7  *
8  * Core file which registers crypto algorithms supported by the SecuritySystem
9  *
10  * You could find a link for the datasheet in Documentation/arch/arm/sunxi.rst
11  */
12 
13 #include <crypto/engine.h>
14 #include <crypto/internal/rng.h>
15 #include <crypto/internal/skcipher.h>
16 #include <linux/clk.h>
17 #include <linux/delay.h>
18 #include <linux/dma-mapping.h>
19 #include <linux/err.h>
20 #include <linux/interrupt.h>
21 #include <linux/io.h>
22 #include <linux/irq.h>
23 #include <linux/kernel.h>
24 #include <linux/module.h>
25 #include <linux/of.h>
26 #include <linux/platform_device.h>
27 #include <linux/pm_runtime.h>
28 #include <linux/reset.h>
29 
30 #include "sun8i-ss.h"
31 
/*
 * A80 variant: AES/DES/3DES ciphers in ECB/CBC only; the hash unit is
 * not usable on this SoC, so every hash slot is marked unsupported.
 */
static const struct ss_variant ss_a80_variant = {
	.alg_cipher = { SS_ALG_AES, SS_ALG_DES, SS_ALG_3DES,
	},
	.alg_hash = { SS_ID_NOTSUPP, SS_ID_NOTSUPP, SS_ID_NOTSUPP, SS_ID_NOTSUPP,
	},
	.op_mode = { SS_OP_ECB, SS_OP_CBC,
	},
	/* { clock name, forced rate (0 = leave as-is), datasheet max rate } */
	.ss_clks = {
		{ "bus", 0, 300 * 1000 * 1000 },
		{ "mod", 0, 300 * 1000 * 1000 },
	}
};
44 
/*
 * A83T variant: same ciphers/modes as the A80 but with a working hash
 * unit (MD5/SHA1/SHA224/SHA256).
 */
static const struct ss_variant ss_a83t_variant = {
	.alg_cipher = { SS_ALG_AES, SS_ALG_DES, SS_ALG_3DES,
	},
	.alg_hash = { SS_ALG_MD5, SS_ALG_SHA1, SS_ALG_SHA224, SS_ALG_SHA256,
	},
	.op_mode = { SS_OP_ECB, SS_OP_CBC,
	},
	/* { clock name, forced rate (0 = leave as-is), datasheet max rate } */
	.ss_clks = {
		{ "bus", 0, 300 * 1000 * 1000 },
		{ "mod", 0, 300 * 1000 * 1000 },
	}
};
57 
58 /*
59  * sun8i_ss_get_engine_number() get the next channel slot
60  * This is a simple round-robin way of getting the next channel
61  */
int sun8i_ss_get_engine_number(struct sun8i_ss_dev *ss)
{
	/* atomic counter modulo MAXFLOW implements the round-robin */
	return atomic_inc_return(&ss->flow) % MAXFLOW;
}
66 
/*
 * sun8i_ss_run_task - run one cipher request on the hardware
 * @ss:   the SecuritySystem device
 * @rctx: request context with DMA addresses of key/IV/src/dst prepared
 * @name: algorithm name, used only for debug/error messages
 *
 * Program the SS registers and start the engine once per scatter/gather
 * chunk, waiting for the completion interrupt after each one.  Access to
 * the (shared) register file is serialized by ss->mlock.
 *
 * Return: 0 on success, -EFAULT if the hardware never signaled completion.
 */
int sun8i_ss_run_task(struct sun8i_ss_dev *ss, struct sun8i_cipher_req_ctx *rctx,
		      const char *name)
{
	int flow = rctx->flow;
	unsigned int ivlen = rctx->ivlen;
	u32 v = SS_START;	/* control word, built up below */
	int i;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	ss->flows[flow].stat_req++;
#endif

	/* choose between stream0/stream1 */
	if (flow)
		v |= SS_FLOW1;
	else
		v |= SS_FLOW0;

	v |= rctx->op_mode;
	v |= rctx->method;

	if (rctx->op_dir)
		v |= SS_DECRYPTION;

	/*
	 * Key size field (bits starting at 7).  Note that a 3DES key is
	 * also 24 bytes and thus hits the 192-bit case; presumably the
	 * hardware ignores this field for non-AES methods — not verifiable
	 * from this file.
	 */
	switch (rctx->keylen) {
	case 128 / 8:
		v |= SS_AES_128BITS << 7;
		break;
	case 192 / 8:
		v |= SS_AES_192BITS << 7;
		break;
	case 256 / 8:
		v |= SS_AES_256BITS << 7;
		break;
	}

	/* one hardware run per prepared SG chunk; stop at the first unused slot */
	for (i = 0; i < MAX_SG; i++) {
		if (!rctx->t_dst[i].addr)
			break;

		mutex_lock(&ss->mlock);
		writel(rctx->p_key, ss->base + SS_KEY_ADR_REG);

		if (ivlen) {
			if (rctx->op_dir == SS_ENCRYPTION) {
				/*
				 * For CBC encryption the IV of chunk i>0 is
				 * the last ciphertext block of the previous
				 * chunk, read directly from the destination
				 * buffer (t_dst[].len is in 32-bit words).
				 */
				if (i == 0)
					writel(rctx->p_iv[0], ss->base + SS_IV_ADR_REG);
				else
					writel(rctx->t_dst[i - 1].addr + rctx->t_dst[i - 1].len * 4 - ivlen, ss->base + SS_IV_ADR_REG);
			} else {
				/* decryption uses one pre-saved IV per chunk */
				writel(rctx->p_iv[i], ss->base + SS_IV_ADR_REG);
			}
		}

		dev_dbg(ss->dev,
			"Processing SG %d on flow %d %s ctl=%x %d to %d method=%x opmode=%x opdir=%x srclen=%d\n",
			i, flow, name, v,
			rctx->t_src[i].len, rctx->t_dst[i].len,
			rctx->method, rctx->op_mode,
			rctx->op_dir, rctx->t_src[i].len);

		writel(rctx->t_src[i].addr, ss->base + SS_SRC_ADR_REG);
		writel(rctx->t_dst[i].addr, ss->base + SS_DST_ADR_REG);
		writel(rctx->t_src[i].len, ss->base + SS_LEN_ADR_REG);

		reinit_completion(&ss->flows[flow].complete);
		ss->flows[flow].status = 0;
		/* ensure status/completion are visible before starting the engine */
		wmb();

		writel(v, ss->base + SS_CTL_REG);
		mutex_unlock(&ss->mlock);
		/*
		 * NOTE(review): the interruptible wait can return early on a
		 * signal; that case is then reported below as a DMA timeout
		 * since status is only set by the IRQ handler — confirm this
		 * is intended.
		 */
		wait_for_completion_interruptible_timeout(&ss->flows[flow].complete,
							  msecs_to_jiffies(2000));
		if (ss->flows[flow].status == 0) {
			dev_err(ss->dev, "DMA timeout for %s\n", name);
			return -EFAULT;
		}
	}

	return 0;
}
148 
ss_irq_handler(int irq,void * data)149 static irqreturn_t ss_irq_handler(int irq, void *data)
150 {
151 	struct sun8i_ss_dev *ss = (struct sun8i_ss_dev *)data;
152 	int flow = 0;
153 	u32 p;
154 
155 	p = readl(ss->base + SS_INT_STA_REG);
156 	for (flow = 0; flow < MAXFLOW; flow++) {
157 		if (p & (BIT(flow))) {
158 			writel(BIT(flow), ss->base + SS_INT_STA_REG);
159 			ss->flows[flow].status = 1;
160 			complete(&ss->flows[flow].complete);
161 		}
162 	}
163 
164 	return IRQ_HANDLED;
165 }
166 
/*
 * Table of every algorithm this driver can expose.  The ss back-pointer
 * of each entry is filled in by sun8i_ss_register_algs() and reset to
 * NULL when the variant lacks support or registration fails.
 */
static struct sun8i_ss_alg_template ss_algs[] = {
/* AES-CBC */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_AES,
	.ss_blockmode = SS_ID_OP_CBC,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= sun8i_ss_aes_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ss_handle_cipher_request,
	},
},
/* AES-ECB */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_AES,
	.ss_blockmode = SS_ID_OP_ECB,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= sun8i_ss_aes_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ss_handle_cipher_request,
	},
},
/* 3DES-CBC */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_DES3,
	.ss_blockmode = SS_ID_OP_CBC,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "cbc-des3-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.setkey		= sun8i_ss_des3_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ss_handle_cipher_request,
	},
},
/* 3DES-ECB */
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ss_algo_id = SS_ID_CIPHER_DES3,
	.ss_blockmode = SS_ID_OP_ECB,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "ecb-des3-sun8i-ss",
			.cra_priority = 400,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ss_cipher_init,
			.cra_exit = sun8i_ss_cipher_exit,
		},
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
		.setkey		= sun8i_ss_des3_setkey,
		.encrypt	= sun8i_ss_skencrypt,
		.decrypt	= sun8i_ss_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ss_handle_cipher_request,
	},
},
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_PRNG
/* Pseudo random number generator */
{
	.type = CRYPTO_ALG_TYPE_RNG,
	.alg.rng = {
		.base = {
			.cra_name		= "stdrng",
			.cra_driver_name	= "sun8i-ss-prng",
			.cra_priority		= 300,
			.cra_ctxsize = sizeof(struct sun8i_ss_rng_tfm_ctx),
			.cra_module		= THIS_MODULE,
			.cra_init		= sun8i_ss_prng_init,
			.cra_exit		= sun8i_ss_prng_exit,
		},
		.generate               = sun8i_ss_prng_generate,
		.seed                   = sun8i_ss_prng_seed,
		.seedsize               = PRNG_SEED_SIZE,
	}
},
#endif
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_HASH
/* MD5 */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_MD5,
	.alg.hash.base = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.init_tfm = sun8i_ss_hash_init_tfm,
		.exit_tfm = sun8i_ss_hash_exit_tfm,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct md5_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "md5-sun8i-ss",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ss_hash_run,
	},
},
/* SHA1 */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA1,
	.alg.hash.base = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.init_tfm = sun8i_ss_hash_init_tfm,
		.exit_tfm = sun8i_ss_hash_exit_tfm,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-sun8i-ss",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ss_hash_run,
	},
},
/* SHA224 */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA224,
	.alg.hash.base = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.init_tfm = sun8i_ss_hash_init_tfm,
		.exit_tfm = sun8i_ss_hash_exit_tfm,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-sun8i-ss",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ss_hash_run,
	},
},
/* SHA256 */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA256,
	.alg.hash.base = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.init_tfm = sun8i_ss_hash_init_tfm,
		.exit_tfm = sun8i_ss_hash_exit_tfm,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-sun8i-ss",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ss_hash_run,
	},
},
/* HMAC-SHA1 (same hardware algo as SHA1, plus a setkey handler) */
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ss_algo_id = SS_ID_HASH_SHA1,
	.alg.hash.base = {
		.init = sun8i_ss_hash_init,
		.update = sun8i_ss_hash_update,
		.final = sun8i_ss_hash_final,
		.finup = sun8i_ss_hash_finup,
		.digest = sun8i_ss_hash_digest,
		.export = sun8i_ss_hash_export,
		.import = sun8i_ss_hash_import,
		.init_tfm = sun8i_ss_hash_init_tfm,
		.exit_tfm = sun8i_ss_hash_exit_tfm,
		.setkey = sun8i_ss_hmac_setkey,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-sun8i-ss",
				.cra_priority = 300,
				.cra_alignmask = 3,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ss_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			}
		}
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ss_hash_run,
	},
},
#endif
};
474 
/*
 * debugfs "stats" file: dump per-flow request counters and, for every
 * registered algorithm, its request/fallback statistics.
 */
static int sun8i_ss_debugfs_show(struct seq_file *seq, void *v)
{
	struct sun8i_ss_dev *ss __maybe_unused = seq->private;
	unsigned int i;

	/*
	 * The #ifdef selects the last argument of this seq_printf(): real
	 * counters with DEBUG, a constant 0 otherwise.
	 */
	for (i = 0; i < MAXFLOW; i++)
		seq_printf(seq, "Channel %d: nreq %lu\n", i,
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
			   ss->flows[i].stat_req);
#else
			   0ul);
#endif

	for (i = 0; i < ARRAY_SIZE(ss_algs); i++) {
		/* skip algorithms that were not registered on this device */
		if (!ss_algs[i].ss)
			continue;
		switch (ss_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ss_algs[i].alg.skcipher.base.base.cra_driver_name,
				   ss_algs[i].alg.skcipher.base.base.cra_name,
				   ss_algs[i].stat_req, ss_algs[i].stat_fb);

			seq_printf(seq, "\tLast fallback is: %s\n",
				   ss_algs[i].fbname);
			seq_printf(seq, "\tFallback due to length: %lu\n",
				   ss_algs[i].stat_fb_len);
			seq_printf(seq, "\tFallback due to SG length: %lu\n",
				   ss_algs[i].stat_fb_sglen);
			seq_printf(seq, "\tFallback due to alignment: %lu\n",
				   ss_algs[i].stat_fb_align);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ss_algs[i].stat_fb_sgnum);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			seq_printf(seq, "%s %s reqs=%lu tsize=%lu\n",
				   ss_algs[i].alg.rng.base.cra_driver_name,
				   ss_algs[i].alg.rng.base.cra_name,
				   ss_algs[i].stat_req, ss_algs[i].stat_bytes);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ss_algs[i].alg.hash.base.halg.base.cra_driver_name,
				   ss_algs[i].alg.hash.base.halg.base.cra_name,
				   ss_algs[i].stat_req, ss_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ss_algs[i].fbname);
			seq_printf(seq, "\tFallback due to length: %lu\n",
				   ss_algs[i].stat_fb_len);
			seq_printf(seq, "\tFallback due to SG length: %lu\n",
				   ss_algs[i].stat_fb_sglen);
			seq_printf(seq, "\tFallback due to alignment: %lu\n",
				   ss_algs[i].stat_fb_align);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ss_algs[i].stat_fb_sgnum);
			break;
		}
	}
	return 0;
}
535 
536 DEFINE_SHOW_ATTRIBUTE(sun8i_ss_debugfs);
537 
sun8i_ss_free_flows(struct sun8i_ss_dev * ss,int i)538 static void sun8i_ss_free_flows(struct sun8i_ss_dev *ss, int i)
539 {
540 	while (i >= 0) {
541 		crypto_engine_exit(ss->flows[i].engine);
542 		i--;
543 	}
544 }
545 
546 /*
547  * Allocate the flow list structure
548  */
allocate_flows(struct sun8i_ss_dev * ss)549 static int allocate_flows(struct sun8i_ss_dev *ss)
550 {
551 	int i, j, err;
552 
553 	ss->flows = devm_kcalloc(ss->dev, MAXFLOW, sizeof(struct sun8i_ss_flow),
554 				 GFP_KERNEL);
555 	if (!ss->flows)
556 		return -ENOMEM;
557 
558 	for (i = 0; i < MAXFLOW; i++) {
559 		init_completion(&ss->flows[i].complete);
560 
561 		ss->flows[i].biv = devm_kmalloc(ss->dev, AES_BLOCK_SIZE,
562 						GFP_KERNEL);
563 		if (!ss->flows[i].biv) {
564 			err = -ENOMEM;
565 			goto error_engine;
566 		}
567 
568 		for (j = 0; j < MAX_SG; j++) {
569 			ss->flows[i].iv[j] = devm_kmalloc(ss->dev, AES_BLOCK_SIZE,
570 							  GFP_KERNEL);
571 			if (!ss->flows[i].iv[j]) {
572 				err = -ENOMEM;
573 				goto error_engine;
574 			}
575 		}
576 
577 		/* the padding could be up to two block. */
578 		ss->flows[i].pad = devm_kmalloc(ss->dev, MAX_PAD_SIZE,
579 						GFP_KERNEL);
580 		if (!ss->flows[i].pad) {
581 			err = -ENOMEM;
582 			goto error_engine;
583 		}
584 		ss->flows[i].result =
585 			devm_kmalloc(ss->dev, max(SHA256_DIGEST_SIZE,
586 						  dma_get_cache_alignment()),
587 				     GFP_KERNEL);
588 		if (!ss->flows[i].result) {
589 			err = -ENOMEM;
590 			goto error_engine;
591 		}
592 
593 		ss->flows[i].engine = crypto_engine_alloc_init(ss->dev, true);
594 		if (!ss->flows[i].engine) {
595 			dev_err(ss->dev, "Cannot allocate engine\n");
596 			i--;
597 			err = -ENOMEM;
598 			goto error_engine;
599 		}
600 		err = crypto_engine_start(ss->flows[i].engine);
601 		if (err) {
602 			dev_err(ss->dev, "Cannot start engine\n");
603 			goto error_engine;
604 		}
605 	}
606 	return 0;
607 error_engine:
608 	sun8i_ss_free_flows(ss, i);
609 	return err;
610 }
611 
612 /*
613  * Power management strategy: The device is suspended unless a TFM exists for
614  * one of the algorithms proposed by this driver.
615  */
sun8i_ss_pm_suspend(struct device * dev)616 static int sun8i_ss_pm_suspend(struct device *dev)
617 {
618 	struct sun8i_ss_dev *ss = dev_get_drvdata(dev);
619 	int i;
620 
621 	reset_control_assert(ss->reset);
622 	for (i = 0; i < SS_MAX_CLOCKS; i++)
623 		clk_disable_unprepare(ss->ssclks[i]);
624 	return 0;
625 }
626 
/*
 * Runtime resume: ungate the clocks, release the reset line and re-enable
 * the completion interrupts of both flows.  On error, sun8i_ss_pm_suspend()
 * is reused to undo whatever was enabled so far (it tolerates clocks that
 * were never prepared).
 */
static int sun8i_ss_pm_resume(struct device *dev)
{
	struct sun8i_ss_dev *ss = dev_get_drvdata(dev);
	int err, i;

	for (i = 0; i < SS_MAX_CLOCKS; i++) {
		/* only clocks declared by the variant were acquired */
		if (!ss->variant->ss_clks[i].name)
			continue;
		err = clk_prepare_enable(ss->ssclks[i]);
		if (err) {
			dev_err(ss->dev, "Cannot prepare_enable %s\n",
				ss->variant->ss_clks[i].name);
			goto error;
		}
	}
	err = reset_control_deassert(ss->reset);
	if (err) {
		dev_err(ss->dev, "Cannot deassert reset control\n");
		goto error;
	}
	/* enable interrupts for all flows */
	writel(BIT(0) | BIT(1), ss->base + SS_INT_CTL_REG);

	return 0;
error:
	sun8i_ss_pm_suspend(dev);
	return err;
}
655 
/* Only runtime PM callbacks are provided; no system sleep handlers */
static const struct dev_pm_ops sun8i_ss_pm_ops = {
	SET_RUNTIME_PM_OPS(sun8i_ss_pm_suspend, sun8i_ss_pm_resume, NULL)
};
659 
sun8i_ss_pm_init(struct sun8i_ss_dev * ss)660 static int sun8i_ss_pm_init(struct sun8i_ss_dev *ss)
661 {
662 	int err;
663 
664 	pm_runtime_use_autosuspend(ss->dev);
665 	pm_runtime_set_autosuspend_delay(ss->dev, 2000);
666 
667 	err = pm_runtime_set_suspended(ss->dev);
668 	if (err)
669 		return err;
670 	pm_runtime_enable(ss->dev);
671 	return err;
672 }
673 
/* Disable runtime PM; counterpart of sun8i_ss_pm_init() */
static void sun8i_ss_pm_exit(struct sun8i_ss_dev *ss)
{
	pm_runtime_disable(ss->dev);
}
678 
/*
 * Register with the crypto API every entry of ss_algs[] that the current
 * variant supports.  Unsupported entries get their ss pointer cleared so
 * that the debugfs/unregister code skips them.
 *
 * Return: 0 on success, the first registration error otherwise.
 *
 * NOTE(review): a crypto_register_rng() failure only clears the entry and
 * continues, while skcipher/ahash failures abort with an error — confirm
 * this asymmetry is intentional.
 */
static int sun8i_ss_register_algs(struct sun8i_ss_dev *ss)
{
	int ss_method, err, id;
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ss_algs); i++) {
		ss_algs[i].ss = ss;
		switch (ss_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			/* the variant must support both the algorithm ... */
			id = ss_algs[i].ss_algo_id;
			ss_method = ss->variant->alg_cipher[id];
			if (ss_method == SS_ID_NOTSUPP) {
				dev_info(ss->dev,
					 "DEBUG: Algo of %s not supported\n",
					 ss_algs[i].alg.skcipher.base.base.cra_name);
				ss_algs[i].ss = NULL;
				break;
			}
			/* ... and the block mode */
			id = ss_algs[i].ss_blockmode;
			ss_method = ss->variant->op_mode[id];
			if (ss_method == SS_ID_NOTSUPP) {
				dev_info(ss->dev, "DEBUG: Blockmode of %s not supported\n",
					 ss_algs[i].alg.skcipher.base.base.cra_name);
				ss_algs[i].ss = NULL;
				break;
			}
			dev_info(ss->dev, "DEBUG: Register %s\n",
				 ss_algs[i].alg.skcipher.base.base.cra_name);
			err = crypto_engine_register_skcipher(&ss_algs[i].alg.skcipher);
			if (err) {
				dev_err(ss->dev, "Fail to register %s\n",
					ss_algs[i].alg.skcipher.base.base.cra_name);
				ss_algs[i].ss = NULL;
				return err;
			}
			break;
		case CRYPTO_ALG_TYPE_RNG:
			err = crypto_register_rng(&ss_algs[i].alg.rng);
			if (err) {
				dev_err(ss->dev, "Fail to register %s\n",
					ss_algs[i].alg.rng.base.cra_name);
				ss_algs[i].ss = NULL;
			}
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			id = ss_algs[i].ss_algo_id;
			ss_method = ss->variant->alg_hash[id];
			if (ss_method == SS_ID_NOTSUPP) {
				dev_info(ss->dev,
					"DEBUG: Algo of %s not supported\n",
					ss_algs[i].alg.hash.base.halg.base.cra_name);
				ss_algs[i].ss = NULL;
				break;
			}
			dev_info(ss->dev, "Register %s\n",
				 ss_algs[i].alg.hash.base.halg.base.cra_name);
			err = crypto_engine_register_ahash(&ss_algs[i].alg.hash);
			if (err) {
				dev_err(ss->dev, "ERROR: Fail to register %s\n",
					ss_algs[i].alg.hash.base.halg.base.cra_name);
				ss_algs[i].ss = NULL;
				return err;
			}
			break;
		default:
			ss_algs[i].ss = NULL;
			dev_err(ss->dev, "ERROR: tried to register an unknown algo\n");
		}
	}
	return 0;
}
750 
sun8i_ss_unregister_algs(struct sun8i_ss_dev * ss)751 static void sun8i_ss_unregister_algs(struct sun8i_ss_dev *ss)
752 {
753 	unsigned int i;
754 
755 	for (i = 0; i < ARRAY_SIZE(ss_algs); i++) {
756 		if (!ss_algs[i].ss)
757 			continue;
758 		switch (ss_algs[i].type) {
759 		case CRYPTO_ALG_TYPE_SKCIPHER:
760 			dev_info(ss->dev, "Unregister %d %s\n", i,
761 				 ss_algs[i].alg.skcipher.base.base.cra_name);
762 			crypto_engine_unregister_skcipher(&ss_algs[i].alg.skcipher);
763 			break;
764 		case CRYPTO_ALG_TYPE_RNG:
765 			dev_info(ss->dev, "Unregister %d %s\n", i,
766 				 ss_algs[i].alg.rng.base.cra_name);
767 			crypto_unregister_rng(&ss_algs[i].alg.rng);
768 			break;
769 		case CRYPTO_ALG_TYPE_AHASH:
770 			dev_info(ss->dev, "Unregister %d %s\n", i,
771 				 ss_algs[i].alg.hash.base.halg.base.cra_name);
772 			crypto_engine_unregister_ahash(&ss_algs[i].alg.hash);
773 			break;
774 		}
775 	}
776 }
777 
/*
 * Acquire all clocks declared by the variant, force their rate when the
 * variant requests one, and warn when a clock runs above the datasheet
 * maximum.
 *
 * Return: 0 on success, a negative errno otherwise.
 */
static int sun8i_ss_get_clks(struct sun8i_ss_dev *ss)
{
	unsigned long cr;
	int err, i;

	for (i = 0; i < SS_MAX_CLOCKS; i++) {
		/* unnamed slots are unused on this variant */
		if (!ss->variant->ss_clks[i].name)
			continue;
		ss->ssclks[i] = devm_clk_get(ss->dev, ss->variant->ss_clks[i].name);
		if (IS_ERR(ss->ssclks[i])) {
			err = PTR_ERR(ss->ssclks[i]);
			dev_err(ss->dev, "Cannot get %s SS clock err=%d\n",
				ss->variant->ss_clks[i].name, err);
			return err;
		}
		cr = clk_get_rate(ss->ssclks[i]);
		if (!cr)
			return -EINVAL;
		/* a non-zero .freq means the variant mandates that rate */
		if (ss->variant->ss_clks[i].freq > 0 &&
		    cr != ss->variant->ss_clks[i].freq) {
			dev_info(ss->dev, "Set %s clock to %lu (%lu Mhz) from %lu (%lu Mhz)\n",
				 ss->variant->ss_clks[i].name,
				 ss->variant->ss_clks[i].freq,
				 ss->variant->ss_clks[i].freq / 1000000,
				 cr, cr / 1000000);
			err = clk_set_rate(ss->ssclks[i], ss->variant->ss_clks[i].freq);
			if (err)
				dev_err(ss->dev, "Fail to set %s clk speed to %lu hz\n",
					ss->variant->ss_clks[i].name,
					ss->variant->ss_clks[i].freq);
		}
		/*
		 * NOTE(review): cr is not re-read after clk_set_rate(), so
		 * this check uses the pre-change rate — confirm intended.
		 */
		if (ss->variant->ss_clks[i].max_freq > 0 &&
		    cr > ss->variant->ss_clks[i].max_freq)
			dev_warn(ss->dev, "Frequency for %s (%lu hz) is higher than datasheet's recommendation (%lu hz)",
				 ss->variant->ss_clks[i].name, cr,
				 ss->variant->ss_clks[i].max_freq);
	}
	return 0;
}
817 
/*
 * Probe: map the registers, acquire clocks/reset/IRQ, create the flow
 * engines, enable runtime PM and register the algorithms.  The error
 * labels unwind in reverse order of setup.
 */
static int sun8i_ss_probe(struct platform_device *pdev)
{
	struct sun8i_ss_dev *ss;
	int err, irq;
	u32 v;

	ss = devm_kzalloc(&pdev->dev, sizeof(*ss), GFP_KERNEL);
	if (!ss)
		return -ENOMEM;

	ss->dev = &pdev->dev;
	platform_set_drvdata(pdev, ss);

	ss->variant = of_device_get_match_data(&pdev->dev);
	if (!ss->variant) {
		dev_err(&pdev->dev, "Missing Crypto Engine variant\n");
		return -EINVAL;
	}

	ss->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(ss->base))
		return PTR_ERR(ss->base);

	err = sun8i_ss_get_clks(ss);
	if (err)
		return err;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ss->reset = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(ss->reset))
		return dev_err_probe(&pdev->dev, PTR_ERR(ss->reset),
				     "No reset control found\n");

	/* serializes access to the shared SS register file */
	mutex_init(&ss->mlock);

	err = allocate_flows(ss);
	if (err)
		return err;

	err = sun8i_ss_pm_init(ss);
	if (err)
		goto error_pm;

	err = devm_request_irq(&pdev->dev, irq, ss_irq_handler, 0, "sun8i-ss", ss);
	if (err) {
		dev_err(ss->dev, "Cannot request SecuritySystem IRQ (err=%d)\n", err);
		goto error_irq;
	}

	err = sun8i_ss_register_algs(ss);
	if (err)
		goto error_alg;

	/* power up briefly just to read and print the die ID */
	err = pm_runtime_resume_and_get(ss->dev);
	if (err < 0)
		goto error_alg;

	v = readl(ss->base + SS_CTL_REG);
	v >>= SS_DIE_ID_SHIFT;
	v &= SS_DIE_ID_MASK;
	dev_info(&pdev->dev, "Security System Die ID %x\n", v);

	pm_runtime_put_sync(ss->dev);

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG)) {
		struct dentry *dbgfs_dir __maybe_unused;
		struct dentry *dbgfs_stats __maybe_unused;

		/* Ignore error of debugfs */
		dbgfs_dir = debugfs_create_dir("sun8i-ss", NULL);
		dbgfs_stats = debugfs_create_file("stats", 0444,
						   dbgfs_dir, ss,
						   &sun8i_ss_debugfs_fops);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
		ss->dbgfs_dir = dbgfs_dir;
		ss->dbgfs_stats = dbgfs_stats;
#endif
	}

	return 0;
error_alg:
	sun8i_ss_unregister_algs(ss);
error_irq:
	sun8i_ss_pm_exit(ss);
error_pm:
	sun8i_ss_free_flows(ss, MAXFLOW - 1);
	return err;
}
910 
/*
 * Remove: mirror of probe — unregister algorithms first so no new
 * requests arrive, then tear down debugfs, engines and runtime PM.
 */
static int sun8i_ss_remove(struct platform_device *pdev)
{
	struct sun8i_ss_dev *ss = platform_get_drvdata(pdev);

	sun8i_ss_unregister_algs(ss);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	debugfs_remove_recursive(ss->dbgfs_dir);
#endif

	sun8i_ss_free_flows(ss, MAXFLOW - 1);

	sun8i_ss_pm_exit(ss);

	return 0;
}
927 
/* Device-tree match table; .data selects the per-SoC capability set */
static const struct of_device_id sun8i_ss_crypto_of_match_table[] = {
	{ .compatible = "allwinner,sun8i-a83t-crypto",
	  .data = &ss_a83t_variant },
	{ .compatible = "allwinner,sun9i-a80-crypto",
	  .data = &ss_a80_variant },
	{}
};
MODULE_DEVICE_TABLE(of, sun8i_ss_crypto_of_match_table);
936 
/* Platform driver glue */
static struct platform_driver sun8i_ss_driver = {
	.probe		 = sun8i_ss_probe,
	.remove		 = sun8i_ss_remove,
	.driver		 = {
		.name		= "sun8i-ss",
		.pm             = &sun8i_ss_pm_ops,
		.of_match_table	= sun8i_ss_crypto_of_match_table,
	},
};

module_platform_driver(sun8i_ss_driver);

MODULE_DESCRIPTION("Allwinner SecuritySystem cryptographic offloader");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Corentin Labbe <clabbe.montjoie@gmail.com>");
952