// SPDX-License-Identifier: GPL-2.0
/*
 * Cryptographic API.
 *
 * Support for StarFive hardware cryptographic engine.
 * Copyright (c) 2022 StarFive Technology
 *
 */

#include <crypto/engine.h>
#include "jh7110-cryp.h"
#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>
#include <linux/spinlock.h>

#define DRIVER_NAME             "jh7110-crypto"

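/*
 * Global list of probed crypto engine instances. Transform contexts are
 * bound to a device from this list via starfive_cryp_find_dev().
 */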
struct starfive_dev_list {
	struct list_head        dev_list;
	spinlock_t              lock; /* protect dev_list */
};

static struct starfive_dev_list dev_list = {
	.dev_list = LIST_HEAD_INIT(dev_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(dev_list.lock),
};

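/*
 * Return the crypto device bound to @ctx. On first use, bind the context to
 * the first device on the global list; subsequent calls return the same
 * device for the lifetime of the context.
 */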
struct starfive_cryp_dev *starfive_cryp_find_dev(struct starfive_cryp_ctx *ctx)
{
	struct starfive_cryp_dev *cryp = NULL, *tmp;

	spin_lock_bh(&dev_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &dev_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&dev_list.lock);

	return cryp;
}

static u16 side_chan;
module_param(side_chan, ushort, 0);
MODULE_PARM_DESC(side_chan, "Enable side channel mitigation for AES module.\n"
			    "Enabling this feature will reduce performance.\n"
			    " 0 - Disabled\n"
			    " other - Enabled");

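/* Request the "tx" and "rx" DMA channels described in the device tree. */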
static int starfive_dma_init(struct starfive_cryp_dev *cryp)
{
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	cryp->tx = dma_request_chan(cryp->dev, "tx");
	if (IS_ERR(cryp->tx))
		return dev_err_probe(cryp->dev, PTR_ERR(cryp->tx),
				     "Error requesting tx dma channel.\n");

	cryp->rx = dma_request_chan(cryp->dev, "rx");
	if (IS_ERR(cryp->rx)) {
		dma_release_channel(cryp->tx);
		return dev_err_probe(cryp->dev, PTR_ERR(cryp->rx),
				     "Error requesting rx dma channel.\n");
	}

	return 0;
}

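/* Release the DMA channels acquired in starfive_dma_init(). */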
static void starfive_dma_cleanup(struct starfive_cryp_dev *cryp)
{
	dma_release_channel(cryp->tx);
	dma_release_channel(cryp->rx);
}

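/*
 * Interrupt handler: mask each completed interrupt source, then defer AES and
 * hash completion handling to their tasklets and signal PKA completion
 * directly.
 */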
static irqreturn_t starfive_cryp_irq(int irq, void *priv)
{
	u32 status;
	u32 mask;
	struct starfive_cryp_dev *cryp = (struct starfive_cryp_dev *)priv;

	mask = readl(cryp->base + STARFIVE_IE_MASK_OFFSET);
	status = readl(cryp->base + STARFIVE_IE_FLAG_OFFSET);
	if (status & STARFIVE_IE_FLAG_AES_DONE) {
		mask |= STARFIVE_IE_MASK_AES_DONE;
		writel(mask, cryp->base + STARFIVE_IE_MASK_OFFSET);
		tasklet_schedule(&cryp->aes_done);
	}

	if (status & STARFIVE_IE_FLAG_HASH_DONE) {
		mask |= STARFIVE_IE_MASK_HASH_DONE;
		writel(mask, cryp->base + STARFIVE_IE_MASK_OFFSET);
		tasklet_schedule(&cryp->hash_done);
	}

	if (status & STARFIVE_IE_FLAG_PKA_DONE) {
		mask |= STARFIVE_IE_MASK_PKA_DONE;
		writel(mask, cryp->base + STARFIVE_IE_MASK_OFFSET);
		complete(&cryp->pka_done);
	}

	return IRQ_HANDLED;
}

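/*
 * Probe: map the register space, acquire clocks, reset line, interrupt and
 * DMA channels, add the device to the global list and register the AES,
 * hash and RSA algorithms with the crypto engine.
 */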
static int starfive_cryp_probe(struct platform_device *pdev)
{
	struct starfive_cryp_dev *cryp;
	struct resource *res;
	int irq;
	int ret;

	cryp = devm_kzalloc(&pdev->dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	platform_set_drvdata(pdev, cryp);
	cryp->dev = &pdev->dev;

	cryp->base = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
	if (IS_ERR(cryp->base))
		return dev_err_probe(&pdev->dev, PTR_ERR(cryp->base),
				     "Error remapping memory for platform device\n");

	tasklet_init(&cryp->aes_done, starfive_aes_done_task, (unsigned long)cryp);
	tasklet_init(&cryp->hash_done, starfive_hash_done_task, (unsigned long)cryp);

	cryp->phys_base = res->start;
	cryp->dma_maxburst = 32;
	cryp->side_chan = side_chan;

	cryp->hclk = devm_clk_get(&pdev->dev, "hclk");
	if (IS_ERR(cryp->hclk))
		return dev_err_probe(&pdev->dev, PTR_ERR(cryp->hclk),
				     "Error getting hardware reference clock\n");

	cryp->ahb = devm_clk_get(&pdev->dev, "ahb");
	if (IS_ERR(cryp->ahb))
		return dev_err_probe(&pdev->dev, PTR_ERR(cryp->ahb),
				     "Error getting ahb reference clock\n");

	cryp->rst = devm_reset_control_get_shared(cryp->dev, NULL);
	if (IS_ERR(cryp->rst))
		return dev_err_probe(&pdev->dev, PTR_ERR(cryp->rst),
				     "Error getting hardware reset line\n");

	init_completion(&cryp->pka_done);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_irq(&pdev->dev, irq, starfive_cryp_irq, 0, pdev->name,
			       (void *)cryp);
	if (ret)
		return dev_err_probe(&pdev->dev, ret,
				     "Failed to register interrupt handler\n");

	clk_prepare_enable(cryp->hclk);
	clk_prepare_enable(cryp->ahb);
	reset_control_deassert(cryp->rst);

	spin_lock(&dev_list.lock);
	list_add(&cryp->list, &dev_list.dev_list);
	spin_unlock(&dev_list.lock);

	ret = starfive_dma_init(cryp);
	if (ret)
		goto err_dma_init;

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(&pdev->dev, 1);
	if (!cryp->engine) {
		ret = -ENOMEM;
		goto err_engine;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret)
		goto err_engine_start;

	ret = starfive_aes_register_algs();
	if (ret)
		goto err_algs_aes;

	ret = starfive_hash_register_algs();
	if (ret)
		goto err_algs_hash;

	ret = starfive_rsa_register_algs();
	if (ret)
		goto err_algs_rsa;

	return 0;

err_algs_rsa:
	starfive_hash_unregister_algs();
err_algs_hash:
	starfive_aes_unregister_algs();
err_algs_aes:
	crypto_engine_stop(cryp->engine);
err_engine_start:
	crypto_engine_exit(cryp->engine);
err_engine:
	starfive_dma_cleanup(cryp);
err_dma_init:
	spin_lock(&dev_list.lock);
	list_del(&cryp->list);
	spin_unlock(&dev_list.lock);

	clk_disable_unprepare(cryp->hclk);
	clk_disable_unprepare(cryp->ahb);
	reset_control_assert(cryp->rst);

	tasklet_kill(&cryp->aes_done);
	tasklet_kill(&cryp->hash_done);

	return ret;
}

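/* Unregister the algorithms and release all resources acquired in probe. */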
static void starfive_cryp_remove(struct platform_device *pdev)
{
	struct starfive_cryp_dev *cryp = platform_get_drvdata(pdev);

	starfive_aes_unregister_algs();
	starfive_hash_unregister_algs();
	starfive_rsa_unregister_algs();

	tasklet_kill(&cryp->aes_done);
	tasklet_kill(&cryp->hash_done);

	crypto_engine_stop(cryp->engine);
	crypto_engine_exit(cryp->engine);

	starfive_dma_cleanup(cryp);

	spin_lock(&dev_list.lock);
	list_del(&cryp->list);
	spin_unlock(&dev_list.lock);

	clk_disable_unprepare(cryp->hclk);
	clk_disable_unprepare(cryp->ahb);
	reset_control_assert(cryp->rst);
}

static const struct of_device_id starfive_dt_ids[] __maybe_unused = {
	{ .compatible = "starfive,jh7110-crypto", .data = NULL},
	{},
};
MODULE_DEVICE_TABLE(of, starfive_dt_ids);

static struct platform_driver starfive_cryp_driver = {
	.probe  = starfive_cryp_probe,
	.remove_new = starfive_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.of_match_table = starfive_dt_ids,
	},
};

module_platform_driver(starfive_cryp_driver);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("StarFive JH7110 Cryptographic Module");