// SPDX-License-Identifier: GPL-2.0-or-later
/* Copyright (C) 2004-2006, Advanced Micro Devices, Inc.
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/pci.h>
#include <linux/pci_ids.h>
#include <linux/crypto.h>
#include <linux/spinlock.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <linux/io.h>
#include <linux/delay.h>

#include "geode-aes.h"

/* Static state: MMIO base of the single AES engine, and a lock
 * serializing all access to it
 */

static void __iomem *_iobase;
static spinlock_t lock;

/* Write a 128-bit field (either a writable key or IV) */
static inline void
_writefield(u32 offset, const void *value)
{
	int i;

	for (i = 0; i < 4; i++)
		iowrite32(((const u32 *) value)[i], _iobase + offset + (i * 4));
}

/* Read a 128-bit field (either a writable key or IV) */
static inline void
_readfield(u32 offset, void *value)
{
	int i;

	for (i = 0; i < 4; i++)
		((u32 *) value)[i] = ioread32(_iobase + offset + (i * 4));
}

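/*
 * Note: do_crypt() hands the engine raw physical addresses obtained with
 * virt_to_phys(), so src and dst must point at linearly mapped memory.
 * Completion is detected by polling AES_INTR_REG for up to AES_OP_TIMEOUT
 * iterations rather than by taking an interrupt.
 */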
static int
do_crypt(const void *src, void *dst, u32 len, u32 flags)
{
	u32 status;
	u32 counter = AES_OP_TIMEOUT;

	iowrite32(virt_to_phys((void *)src), _iobase + AES_SOURCEA_REG);
	iowrite32(virt_to_phys(dst), _iobase + AES_DSTA_REG);
	iowrite32(len, _iobase + AES_LENA_REG);

	/* Start the operation */
	iowrite32(AES_CTRL_START | flags, _iobase + AES_CTRLA_REG);

	do {
		status = ioread32(_iobase + AES_INTR_REG);
		cpu_relax();
	} while (!(status & AES_INTRA_PENDING) && --counter);

	/* Clear the event */
	iowrite32((status & 0xFF) | AES_INTRA_PENDING, _iobase + AES_INTR_REG);
	return counter ? 0 : 1;
}

static void
geode_aes_crypt(const struct geode_aes_tfm_ctx *tctx, const void *src,
		void *dst, u32 len, u8 *iv, int mode, int dir)
{
	u32 flags = 0;
	unsigned long iflags;
	int ret;

	/* If the source and destination are the same, then
	 * we need to turn on the coherent flags; otherwise
	 * we don't need to worry.
	 */

	flags |= (AES_CTRL_DCA | AES_CTRL_SCA);

	if (dir == AES_DIR_ENCRYPT)
		flags |= AES_CTRL_ENCRYPT;

	/* Start the critical section */

	spin_lock_irqsave(&lock, iflags);

	if (mode == AES_MODE_CBC) {
		flags |= AES_CTRL_CBC;
		_writefield(AES_WRITEIV0_REG, iv);
	}

	flags |= AES_CTRL_WRKEY;
	_writefield(AES_WRITEKEY0_REG, tctx->key);

	ret = do_crypt(src, dst, len, flags);
	BUG_ON(ret);

	if (mode == AES_MODE_CBC)
		_readfield(AES_WRITEIV0_REG, iv);

	spin_unlock_irqrestore(&lock, iflags);
}
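
/*
 * The engine has a single key/IV register set shared by every tfm, so the
 * key (and, for CBC, the IV) is reloaded on each call and the global
 * spinlock serializes all operations. The IV is read back after a CBC
 * operation so chained calls continue from the correct state.
 */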

/* CRYPTO-API Functions */

static int geode_setkey_cip(struct crypto_tfm *tfm, const u8 *key,
		unsigned int len)
{
	struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);
	int ret;

	tctx->keylen = len;

	if (len == AES_KEYSIZE_128) {
		memcpy(tctx->key, key, len);
		return 0;
	}

	if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256)
		/* not supported at all */
		return -EINVAL;

	/*
	 * The requested key size is not supported by HW; fall back to
	 * software.
	 */
	tctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	tctx->fallback.cip->base.crt_flags |=
		(tfm->crt_flags & CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(tctx->fallback.cip, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (tctx->fallback.cip->base.crt_flags &
				   CRYPTO_TFM_RES_MASK);
	}
	return ret;
}
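
/*
 * Only 128-bit keys run on the engine. Both setkey paths keep 192- and
 * 256-bit keys working by programming a software fallback tfm instead,
 * mirroring the caller's CRYPTO_TFM_REQ_* flags into the fallback and
 * copying any CRYPTO_TFM_RES_* result flags back out.
 */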

static int geode_setkey_skcipher(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	int ret;

	tctx->keylen = len;

	if (len == AES_KEYSIZE_128) {
		memcpy(tctx->key, key, len);
		return 0;
	}

	if (len != AES_KEYSIZE_192 && len != AES_KEYSIZE_256)
		/* not supported at all */
		return -EINVAL;

	/*
	 * The requested key size is not supported by HW; fall back to
	 * software.
	 */
	crypto_skcipher_clear_flags(tctx->fallback.skcipher,
				    CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->fallback.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	ret = crypto_skcipher_setkey(tctx->fallback.skcipher, key, len);
	crypto_skcipher_set_flags(tfm,
				  crypto_skcipher_get_flags(tctx->fallback.skcipher) &
				  CRYPTO_TFM_RES_MASK);
	return ret;
}

static void
geode_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
		crypto_cipher_encrypt_one(tctx->fallback.cip, out, in);
		return;
	}

	geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
			AES_MODE_ECB, AES_DIR_ENCRYPT);
}

static void
geode_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
		crypto_cipher_decrypt_one(tctx->fallback.cip, out, in);
		return;
	}

	geode_aes_crypt(tctx, in, out, AES_BLOCK_SIZE, NULL,
			AES_MODE_ECB, AES_DIR_DECRYPT);
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = crypto_tfm_alg_name(tfm);
	struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	tctx->fallback.cip = crypto_alloc_cipher(name, 0,
						 CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(tctx->fallback.cip)) {
		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
		return PTR_ERR(tctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct geode_aes_tfm_ctx *tctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(tctx->fallback.cip);
}

static struct crypto_alg geode_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"geode-aes",
	.cra_priority		=	300,
	.cra_alignmask		=	15,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_init		=	fallback_init_cip,
	.cra_exit		=	fallback_exit_cip,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct geode_aes_tfm_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{
		.cipher	=	{
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	geode_setkey_cip,
			.cia_encrypt		=	geode_encrypt,
			.cia_decrypt		=	geode_decrypt
		}
	}
};
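
/*
 * The single-block "aes" cipher registers at priority 300 and the skcipher
 * modes below at 400, so the crypto core prefers this hardware over the
 * generic software implementations (priority 100) whenever it is present.
 */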

static int geode_init_skcipher(struct crypto_skcipher *tfm)
{
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	tctx->fallback.skcipher =
		crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK |
				      CRYPTO_ALG_ASYNC);
	if (IS_ERR(tctx->fallback.skcipher)) {
		printk(KERN_ERR "Error allocating fallback algo %s\n", name);
		return PTR_ERR(tctx->fallback.skcipher);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(tctx->fallback.skcipher));
	return 0;
}

static void geode_exit_skcipher(struct crypto_skcipher *tfm)
{
	struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->fallback.skcipher);
}

static int geode_skcipher_crypt(struct skcipher_request *req, int mode, int dir)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct geode_aes_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	if (unlikely(tctx->keylen != AES_KEYSIZE_128)) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, tctx->fallback.skcipher);
		if (dir == AES_DIR_DECRYPT)
			return crypto_skcipher_decrypt(subreq);
		else
			return crypto_skcipher_encrypt(subreq);
	}

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) != 0) {
		geode_aes_crypt(tctx, walk.src.virt.addr, walk.dst.virt.addr,
				round_down(nbytes, AES_BLOCK_SIZE),
				walk.iv, mode, dir);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}

	return err;
}
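
/*
 * For 192/256-bit keys the whole request is bounced to the software
 * fallback via a subrequest placed in the request context (sized for this
 * in geode_init_skcipher()). Otherwise the walk feeds the engine whole
 * blocks only; any partial tail is handed back to skcipher_walk_done().
 */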

static int geode_cbc_encrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_CBC, AES_DIR_ENCRYPT);
}

static int geode_cbc_decrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_CBC, AES_DIR_DECRYPT);
}

static int geode_ecb_encrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_ECB, AES_DIR_ENCRYPT);
}

static int geode_ecb_decrypt(struct skcipher_request *req)
{
	return geode_skcipher_crypt(req, AES_MODE_ECB, AES_DIR_DECRYPT);
}

static struct skcipher_alg geode_skcipher_algs[] = {
	{
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-geode",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
					  CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct geode_aes_tfm_ctx),
		.base.cra_alignmask	= 15,
		.base.cra_module	= THIS_MODULE,
		.init			= geode_init_skcipher,
		.exit			= geode_exit_skcipher,
		.setkey			= geode_setkey_skcipher,
		.encrypt		= geode_cbc_encrypt,
		.decrypt		= geode_cbc_decrypt,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
	}, {
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-geode",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
					  CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct geode_aes_tfm_ctx),
		.base.cra_alignmask	= 15,
		.base.cra_module	= THIS_MODULE,
		.init			= geode_init_skcipher,
		.exit			= geode_exit_skcipher,
		.setkey			= geode_setkey_skcipher,
		.encrypt		= geode_ecb_encrypt,
		.decrypt		= geode_ecb_decrypt,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
	},
};
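
/*
 * A minimal sketch of how a kernel caller would reach these algorithms
 * once registered: allocate "cbc(aes)" (the crypto core picks
 * "cbc-aes-geode" when it is the highest-priority implementation), set a
 * key, and issue a request. Error handling is elided, and key/data/len/iv
 * are illustrative placeholders, not part of this driver:
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, data, len);	// len: multiple of AES_BLOCK_SIZE
 *	skcipher_request_set_callback(req, 0, crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */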

static void geode_aes_remove(struct pci_dev *dev)
{
	crypto_unregister_alg(&geode_alg);
	crypto_unregister_skciphers(geode_skcipher_algs,
				    ARRAY_SIZE(geode_skcipher_algs));

	pci_iounmap(dev, _iobase);
	_iobase = NULL;

	pci_release_regions(dev);
	pci_disable_device(dev);
}

static int geode_aes_probe(struct pci_dev *dev, const struct pci_device_id *id)
{
	int ret;

	ret = pci_enable_device(dev);
	if (ret)
		return ret;

	ret = pci_request_regions(dev, "geode-aes");
	if (ret)
		goto eenable;

	_iobase = pci_iomap(dev, 0, 0);

	if (_iobase == NULL) {
		ret = -ENOMEM;
		goto erequest;
	}

	spin_lock_init(&lock);

	/* Clear any pending activity */
	iowrite32(AES_INTR_PENDING | AES_INTR_MASK, _iobase + AES_INTR_REG);

	ret = crypto_register_alg(&geode_alg);
	if (ret)
		goto eiomap;

	ret = crypto_register_skciphers(geode_skcipher_algs,
					ARRAY_SIZE(geode_skcipher_algs));
	if (ret)
		goto ealg;

	dev_notice(&dev->dev, "GEODE AES engine enabled.\n");
	return 0;

 ealg:
	crypto_unregister_alg(&geode_alg);

 eiomap:
	pci_iounmap(dev, _iobase);

 erequest:
	pci_release_regions(dev);

 eenable:
	pci_disable_device(dev);

	dev_err(&dev->dev, "GEODE AES initialization failed.\n");
	return ret;
}
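
/*
 * The error labels above unwind in exact reverse order of initialization,
 * so a failure at any step releases only what was already set up;
 * geode_aes_remove() performs the same teardown for the success path.
 */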

static struct pci_device_id geode_aes_tbl[] = {
	{ PCI_VDEVICE(AMD, PCI_DEVICE_ID_AMD_LX_AES), },
	{ 0, }
};

MODULE_DEVICE_TABLE(pci, geode_aes_tbl);

static struct pci_driver geode_aes_driver = {
	.name = "Geode LX AES",
	.id_table = geode_aes_tbl,
	.probe = geode_aes_probe,
	.remove = geode_aes_remove,
};

module_pci_driver(geode_aes_driver);

MODULE_AUTHOR("Advanced Micro Devices, Inc.");
MODULE_DESCRIPTION("Geode LX Hardware AES driver");
MODULE_LICENSE("GPL");
449