// SPDX-License-Identifier: GPL-2.0
/*
 * Xilinx ZynqMP AES Driver.
 * Copyright (c) 2020 Xilinx Inc.
 */

#include <crypto/aes.h>
#include <crypto/engine.h>
#include <crypto/gcm.h>
#include <crypto/internal/aead.h>
#include <crypto/scatterwalk.h>

#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#include <linux/firmware/xlnx-zynqmp.h>

#define ZYNQMP_DMA_BIT_MASK	32U

#define ZYNQMP_AES_KEY_SIZE		AES_KEYSIZE_256
#define ZYNQMP_AES_AUTH_SIZE		16U
#define ZYNQMP_KEY_SRC_SEL_KEY_LEN	1U
#define ZYNQMP_AES_BLK_SIZE		1U
#define ZYNQMP_AES_MIN_INPUT_BLK_SIZE	4U
#define ZYNQMP_AES_WORD_LEN		4U

#define ZYNQMP_AES_GCM_TAG_MISMATCH_ERR		0x01
#define ZYNQMP_AES_WRONG_KEY_SRC_ERR		0x13
#define ZYNQMP_AES_PUF_NOT_PROGRAMMED		0xE300

enum zynqmp_aead_op {
	ZYNQMP_AES_DECRYPT = 0,
	ZYNQMP_AES_ENCRYPT
};

enum zynqmp_aead_keysrc {
	ZYNQMP_AES_KUP_KEY = 0,
	ZYNQMP_AES_DEV_KEY,
	ZYNQMP_AES_PUF_KEY
};

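/**
 * struct zynqmp_aead_drv_ctx - Driver-wide AES context
 * @alg.aead:	AEAD algorithm registered with the crypto API
 * @dev:	Platform device servicing this driver instance
 * @engine:	Crypto engine serializing requests to the hardware
 */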
struct zynqmp_aead_drv_ctx {
	union {
		struct aead_alg aead;
	} alg;
	struct device *dev;
	struct crypto_engine *engine;
};

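/**
 * struct zynqmp_aead_hw_req - AES request descriptor consumed by the PMU
 *			       firmware via zynqmp_pm_aes_engine()
 * @src:	DMA address of the input payload
 * @iv:		DMA address of the 96-bit GCM IV
 * @key:	DMA address of the key, or 0 for a device/PUF key source
 * @dst:	DMA address of the output buffer
 * @size:	Payload size in bytes
 * @op:		Operation to perform (encrypt or decrypt)
 * @keysrc:	Key source selection (KUP, device or PUF key)
 */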
struct zynqmp_aead_hw_req {
	u64 src;
	u64 iv;
	u64 key;
	u64 dst;
	u64 size;
	u64 op;
	u64 keysrc;
};

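/**
 * struct zynqmp_aead_tfm_ctx - Per-transform context
 * @engine_ctx:	Crypto engine callbacks for this transform
 * @dev:	Device used for DMA allocations
 * @key:	Buffer holding a user-provided (KUP) key
 * @iv:		Initialization vector (not referenced by this driver version)
 * @keylen:	Length of the current key in bytes
 * @authsize:	Requested authentication tag size in bytes
 * @keysrc:	Selected hardware key source
 * @fbk_cipher:	Software fallback for request shapes the hardware rejects
 */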
struct zynqmp_aead_tfm_ctx {
	struct crypto_engine_ctx engine_ctx;
	struct device *dev;
	u8 key[ZYNQMP_AES_KEY_SIZE];
	u8 *iv;
	u32 keylen;
	u32 authsize;
	enum zynqmp_aead_keysrc keysrc;
	struct crypto_aead *fbk_cipher;
};

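/**
 * struct zynqmp_aead_req_ctx - Per-request context
 * @op:	Requested operation (encrypt or decrypt)
 */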
struct zynqmp_aead_req_ctx {
	enum zynqmp_aead_op op;
};

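/**
 * zynqmp_aes_aead_cipher - Copy a request into DMA memory and run it
 *			    through the hardware AES engine via the PMU
 *			    firmware.
 * @req:	Pointer to the AEAD request
 *
 * Return: 0 on success, a negative errno or negated firmware status on
 *	   failure.
 */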
static int zynqmp_aes_aead_cipher(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct zynqmp_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead);
	struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);
	struct device *dev = tfm_ctx->dev;
	struct zynqmp_aead_hw_req *hwreq;
	dma_addr_t dma_addr_data, dma_addr_hw_req;
	unsigned int data_size;
	unsigned int status;
	int ret;
	size_t dma_size;
	char *kbuf;
	int err;

	/*
	 * The engine reads [payload | IV | optional KUP key] from a single
	 * coherent buffer and writes its output, including the 16-byte GCM
	 * tag on encryption, back to the start of the same buffer.  The
	 * KUP layout already leaves enough slack for the tag; otherwise
	 * reserve explicit room for it.
	 */
	if (tfm_ctx->keysrc == ZYNQMP_AES_KUP_KEY)
		dma_size = req->cryptlen + ZYNQMP_AES_KEY_SIZE
			   + GCM_AES_IV_SIZE;
	else
		dma_size = req->cryptlen + GCM_AES_IV_SIZE
			   + ZYNQMP_AES_AUTH_SIZE;

	kbuf = dma_alloc_coherent(dev, dma_size, &dma_addr_data, GFP_KERNEL);
	if (!kbuf)
		return -ENOMEM;

	hwreq = dma_alloc_coherent(dev, sizeof(struct zynqmp_aead_hw_req),
				   &dma_addr_hw_req, GFP_KERNEL);
	if (!hwreq) {
		dma_free_coherent(dev, dma_size, kbuf, dma_addr_data);
		return -ENOMEM;
	}

	data_size = req->cryptlen;
	scatterwalk_map_and_copy(kbuf, req->src, 0, req->cryptlen, 0);
	memcpy(kbuf + data_size, req->iv, GCM_AES_IV_SIZE);

	hwreq->src = dma_addr_data;
	hwreq->dst = dma_addr_data;
	hwreq->iv = hwreq->src + data_size;
	hwreq->keysrc = tfm_ctx->keysrc;
	hwreq->op = rq_ctx->op;

	if (hwreq->op == ZYNQMP_AES_ENCRYPT)
		hwreq->size = data_size;
	else
		hwreq->size = data_size - ZYNQMP_AES_AUTH_SIZE;

	if (hwreq->keysrc == ZYNQMP_AES_KUP_KEY) {
		memcpy(kbuf + data_size + GCM_AES_IV_SIZE,
		       tfm_ctx->key, ZYNQMP_AES_KEY_SIZE);

		hwreq->key = hwreq->src + data_size + GCM_AES_IV_SIZE;
	} else {
		hwreq->key = 0;
	}

	ret = zynqmp_pm_aes_engine(dma_addr_hw_req, &status);

	if (ret) {
		dev_err(dev, "ERROR: AES PM API failed\n");
		err = ret;
	} else if (status) {
		switch (status) {
		case ZYNQMP_AES_GCM_TAG_MISMATCH_ERR:
			dev_err(dev, "ERROR: GCM tag mismatch\n");
			break;
		case ZYNQMP_AES_WRONG_KEY_SRC_ERR:
			dev_err(dev, "ERROR: Wrong KeySrc, enable secure mode\n");
			break;
		case ZYNQMP_AES_PUF_NOT_PROGRAMMED:
			dev_err(dev, "ERROR: PUF is not registered\n");
			break;
		default:
			dev_err(dev, "ERROR: Unknown error\n");
			break;
		}
		err = -status;
	} else {
		if (hwreq->op == ZYNQMP_AES_ENCRYPT)
			data_size = data_size + ZYNQMP_AES_AUTH_SIZE;
		else
			data_size = data_size - ZYNQMP_AES_AUTH_SIZE;

		sg_copy_from_buffer(req->dst, sg_nents(req->dst),
				    kbuf, data_size);
		err = 0;
	}
	/* Both buffers are valid here; scrub key material before freeing. */
	memzero_explicit(kbuf, dma_size);
	dma_free_coherent(dev, dma_size, kbuf, dma_addr_data);
	memzero_explicit(hwreq, sizeof(struct zynqmp_aead_hw_req));
	dma_free_coherent(dev, sizeof(struct zynqmp_aead_hw_req),
			  hwreq, dma_addr_hw_req);
	return err;
}

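/**
 * zynqmp_fallback_check - Decide whether a request must take the software
 *			   fallback path instead of the hardware engine.
 * @tfm_ctx:	Transform context
 * @req:	AEAD request to inspect
 *
 * Return: 1 if the fallback cipher is required, 0 otherwise.
 */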
static int zynqmp_fallback_check(struct zynqmp_aead_tfm_ctx *tfm_ctx,
				 struct aead_request *req)
{
	int need_fallback = 0;
	struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);

	if (tfm_ctx->authsize != ZYNQMP_AES_AUTH_SIZE)
		need_fallback = 1;

	if (tfm_ctx->keysrc == ZYNQMP_AES_KUP_KEY &&
	    tfm_ctx->keylen != ZYNQMP_AES_KEY_SIZE) {
		need_fallback = 1;
	}
	if (req->assoclen != 0 ||
	    req->cryptlen < ZYNQMP_AES_MIN_INPUT_BLK_SIZE) {
		need_fallback = 1;
	}
	if ((req->cryptlen % ZYNQMP_AES_WORD_LEN) != 0)
		need_fallback = 1;

	if (rq_ctx->op == ZYNQMP_AES_DECRYPT &&
	    req->cryptlen <= ZYNQMP_AES_AUTH_SIZE) {
		need_fallback = 1;
	}
	return need_fallback;
}

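/**
 * zynqmp_handle_aes_req - Crypto engine callback dispatching one request
 *			   either to the hardware or to the fallback cipher.
 * @engine:	Crypto engine handling the request queue
 * @req:	Base request, embedded in a struct aead_request
 *
 * Return: Always 0; the per-request status is reported through
 *	   crypto_finalize_aead_request().
 */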
static int zynqmp_handle_aes_req(struct crypto_engine *engine,
				 void *req)
{
	struct aead_request *areq =
				container_of(req, struct aead_request, base);
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct zynqmp_aead_tfm_ctx *tfm_ctx = crypto_aead_ctx(aead);
	struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(areq);
	struct aead_request *subreq = aead_request_ctx(areq);
	int need_fallback;
	int err;

	need_fallback = zynqmp_fallback_check(tfm_ctx, areq);

	if (need_fallback) {
		aead_request_set_tfm(subreq, tfm_ctx->fbk_cipher);

		aead_request_set_callback(subreq, areq->base.flags,
					  NULL, NULL);
		aead_request_set_crypt(subreq, areq->src, areq->dst,
				       areq->cryptlen, areq->iv);
		aead_request_set_ad(subreq, areq->assoclen);
		if (rq_ctx->op == ZYNQMP_AES_ENCRYPT)
			err = crypto_aead_encrypt(subreq);
		else
			err = crypto_aead_decrypt(subreq);
	} else {
		err = zynqmp_aes_aead_cipher(areq);
	}

	crypto_finalize_aead_request(engine, areq, err);
	return 0;
}

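/**
 * zynqmp_aes_aead_setkey - Set the key or select a hardware key source.
 * @aead:	AEAD transform
 * @key:	Either a 32-byte AES key, or a single selector byte naming
 *		one of the hardware key sources (KUP, device or PUF key)
 * @keylen:	Length of @key in bytes
 *
 * Return: Status of the fallback cipher's setkey operation.
 */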
static int zynqmp_aes_aead_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct zynqmp_aead_tfm_ctx *tfm_ctx =
			(struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
	unsigned char keysrc;

	if (keylen == ZYNQMP_KEY_SRC_SEL_KEY_LEN) {
		keysrc = *key;
		if (keysrc == ZYNQMP_AES_KUP_KEY ||
		    keysrc == ZYNQMP_AES_DEV_KEY ||
		    keysrc == ZYNQMP_AES_PUF_KEY) {
			tfm_ctx->keysrc = (enum zynqmp_aead_keysrc)keysrc;
		} else {
			tfm_ctx->keylen = keylen;
		}
	} else {
		tfm_ctx->keylen = keylen;
		if (keylen == ZYNQMP_AES_KEY_SIZE) {
			tfm_ctx->keysrc = ZYNQMP_AES_KUP_KEY;
			memcpy(tfm_ctx->key, key, keylen);
		}
	}

	tfm_ctx->fbk_cipher->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	tfm_ctx->fbk_cipher->base.crt_flags |= (aead->base.crt_flags &
					CRYPTO_TFM_REQ_MASK);

	return crypto_aead_setkey(tfm_ctx->fbk_cipher, key, keylen);
}

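/**
 * zynqmp_aes_aead_setauthsize - Record the requested authentication tag size.
 * @aead:	AEAD transform
 * @authsize:	Tag size in bytes; the hardware only supports 16, anything
 *		else is routed to the fallback at request time
 *
 * Return: Status of the fallback cipher's setauthsize operation.
 */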
static int zynqmp_aes_aead_setauthsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct zynqmp_aead_tfm_ctx *tfm_ctx =
			(struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);

	tfm_ctx->authsize = authsize;
	return crypto_aead_setauthsize(tfm_ctx->fbk_cipher, authsize);
}

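/**
 * zynqmp_aes_aead_encrypt - Queue an encrypt request on the crypto engine.
 * @req:	AEAD request
 *
 * Return: Status of the transfer to the engine queue.
 */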
static int zynqmp_aes_aead_encrypt(struct aead_request *req)
{
	struct zynqmp_aead_drv_ctx *drv_ctx;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);

	rq_ctx->op = ZYNQMP_AES_ENCRYPT;
	drv_ctx = container_of(alg, struct zynqmp_aead_drv_ctx, alg.aead);

	return crypto_transfer_aead_request_to_engine(drv_ctx->engine, req);
}

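/**
 * zynqmp_aes_aead_decrypt - Queue a decrypt request on the crypto engine.
 * @req:	AEAD request
 *
 * Return: Status of the transfer to the engine queue.
 */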
static int zynqmp_aes_aead_decrypt(struct aead_request *req)
{
	struct zynqmp_aead_drv_ctx *drv_ctx;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct zynqmp_aead_req_ctx *rq_ctx = aead_request_ctx(req);

	rq_ctx->op = ZYNQMP_AES_DECRYPT;
	drv_ctx = container_of(alg, struct zynqmp_aead_drv_ctx, alg.aead);

	return crypto_transfer_aead_request_to_engine(drv_ctx->engine, req);
}

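/**
 * zynqmp_aes_aead_init - Initialize a transform: bind it to the device,
 *			  set up engine callbacks and allocate the fallback.
 * @aead:	AEAD transform being initialized
 *
 * Return: 0 on success, or the PTR_ERR of the fallback allocation.
 */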
static int zynqmp_aes_aead_init(struct crypto_aead *aead)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct zynqmp_aead_tfm_ctx *tfm_ctx =
		(struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);
	struct zynqmp_aead_drv_ctx *drv_ctx;
	struct aead_alg *alg = crypto_aead_alg(aead);

	drv_ctx = container_of(alg, struct zynqmp_aead_drv_ctx, alg.aead);
	tfm_ctx->dev = drv_ctx->dev;

	tfm_ctx->engine_ctx.op.do_one_request = zynqmp_handle_aes_req;
	tfm_ctx->engine_ctx.op.prepare_request = NULL;
	tfm_ctx->engine_ctx.op.unprepare_request = NULL;

	tfm_ctx->fbk_cipher = crypto_alloc_aead(drv_ctx->alg.aead.base.cra_name,
						0,
						CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(tfm_ctx->fbk_cipher)) {
		pr_err("%s() Error: failed to allocate fallback for %s\n",
		       __func__, drv_ctx->alg.aead.base.cra_name);
		return PTR_ERR(tfm_ctx->fbk_cipher);
	}

	crypto_aead_set_reqsize(aead,
				max(sizeof(struct zynqmp_aead_req_ctx),
				    sizeof(struct aead_request) +
				    crypto_aead_reqsize(tfm_ctx->fbk_cipher)));
	return 0;
}

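/**
 * zynqmp_aes_aead_exit - Release the fallback cipher and scrub key material.
 * @aead:	AEAD transform being torn down
 */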
static void zynqmp_aes_aead_exit(struct crypto_aead *aead)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct zynqmp_aead_tfm_ctx *tfm_ctx =
			(struct zynqmp_aead_tfm_ctx *)crypto_tfm_ctx(tfm);

	if (tfm_ctx->fbk_cipher) {
		crypto_free_aead(tfm_ctx->fbk_cipher);
		tfm_ctx->fbk_cipher = NULL;
	}
	memzero_explicit(tfm_ctx, sizeof(struct zynqmp_aead_tfm_ctx));
}

static struct zynqmp_aead_drv_ctx aes_drv_ctx = {
	.alg.aead = {
		.setkey		= zynqmp_aes_aead_setkey,
		.setauthsize	= zynqmp_aes_aead_setauthsize,
		.encrypt	= zynqmp_aes_aead_encrypt,
		.decrypt	= zynqmp_aes_aead_decrypt,
		.init		= zynqmp_aes_aead_init,
		.exit		= zynqmp_aes_aead_exit,
		.ivsize		= GCM_AES_IV_SIZE,
		.maxauthsize	= ZYNQMP_AES_AUTH_SIZE,
		.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "xilinx-zynqmp-aes-gcm",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_TYPE_AEAD |
					  CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_ALLOCATES_MEMORY |
					  CRYPTO_ALG_KERN_DRIVER_ONLY |
					  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize		= ZYNQMP_AES_BLK_SIZE,
		.cra_ctxsize		= sizeof(struct zynqmp_aead_tfm_ctx),
		.cra_module		= THIS_MODULE,
		}
	}
};

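/**
 * zynqmp_aes_aead_probe - Bind the single supported device instance, set up
 *			   DMA, and register the engine and algorithm.
 * @pdev:	Platform device
 *
 * Only one device instance is supported; a second probe returns -ENODEV.
 *
 * Return: 0 on success, negative errno on failure.
 */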
static int zynqmp_aes_aead_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	int err;

	/* ZynqMP AES driver supports only one instance */
	if (!aes_drv_ctx.dev)
		aes_drv_ctx.dev = dev;
	else
		return -ENODEV;

	err = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(ZYNQMP_DMA_BIT_MASK));
	if (err < 0) {
		dev_err(dev, "No usable DMA configuration\n");
		return err;
	}

	aes_drv_ctx.engine = crypto_engine_alloc_init(dev, 1);
	if (!aes_drv_ctx.engine) {
		dev_err(dev, "Cannot alloc AES engine\n");
		err = -ENOMEM;
		goto err_engine;
	}

	err = crypto_engine_start(aes_drv_ctx.engine);
	if (err) {
		dev_err(dev, "Cannot start AES engine\n");
		goto err_engine;
	}

	err = crypto_register_aead(&aes_drv_ctx.alg.aead);
	if (err < 0) {
		dev_err(dev, "Failed to register AEAD alg.\n");
		goto err_engine;
	}
	return 0;

err_engine:
	if (aes_drv_ctx.engine)
		crypto_engine_exit(aes_drv_ctx.engine);

	return err;
}

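/**
 * zynqmp_aes_aead_remove - Stop the engine and unregister the algorithm.
 * @pdev:	Platform device
 *
 * Return: Always 0.
 */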
static int zynqmp_aes_aead_remove(struct platform_device *pdev)
{
	crypto_engine_exit(aes_drv_ctx.engine);
	crypto_unregister_aead(&aes_drv_ctx.alg.aead);

	return 0;
}

static const struct of_device_id zynqmp_aes_dt_ids[] = {
	{ .compatible = "xlnx,zynqmp-aes" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, zynqmp_aes_dt_ids);

static struct platform_driver zynqmp_aes_driver = {
	.probe	= zynqmp_aes_aead_probe,
	.remove = zynqmp_aes_aead_remove,
	.driver = {
		.name		= "zynqmp-aes",
		.of_match_table = zynqmp_aes_dt_ids,
	},
};

module_platform_driver(zynqmp_aes_driver);
MODULE_LICENSE("GPL");
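/* Module metadata; description wording taken from the file header above. */
MODULE_DESCRIPTION("Xilinx ZynqMP AES driver");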