// SPDX-License-Identifier: GPL-2.0
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/printk.h>

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <crypto/ctr.h>
#include <crypto/des.h>
#include <crypto/xts.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

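/* algorithm priority; higher values are preferred by the crypto API */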
#define PRIO 4001

struct nitrox_cipher {
	const char *name;
	enum flexi_cipher value;
};

/* supported cipher list */
static const struct nitrox_cipher flexi_cipher_table[] = {
	{ "null",		CIPHER_NULL },
	{ "cbc(des3_ede)",	CIPHER_3DES_CBC },
	{ "ecb(des3_ede)",	CIPHER_3DES_ECB },
	{ "cbc(aes)",		CIPHER_AES_CBC },
	{ "ecb(aes)",		CIPHER_AES_ECB },
	{ "cfb(aes)",		CIPHER_AES_CFB },
	{ "rfc3686(ctr(aes))",	CIPHER_AES_CTR },
	{ "xts(aes)",		CIPHER_AES_XTS },
	{ "cts(cbc(aes))",	CIPHER_AES_CBC_CTS },
	{ NULL,			CIPHER_INVALID }
};

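/* map an algorithm name to its flexi cipher type; CIPHER_INVALID if unsupported */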
static enum flexi_cipher flexi_cipher_type(const char *name)
{
	const struct nitrox_cipher *cipher = flexi_cipher_table;

	while (cipher->name) {
		if (!strcmp(cipher->name, name))
			break;
		cipher++;
	}
	return cipher->value;
}

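/*
 * encode the AES key size for the flexi context:
 * 1 - AES-128, 2 - AES-192, 3 - AES-256
 */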
static int flexi_aes_keylen(int keylen)
{
	int aes_keylen;

	switch (keylen) {
	case AES_KEYSIZE_128:
		aes_keylen = 1;
		break;
	case AES_KEYSIZE_192:
		aes_keylen = 2;
		break;
	case AES_KEYSIZE_256:
		aes_keylen = 3;
		break;
	default:
		aes_keylen = -EINVAL;
		break;
	}
	return aes_keylen;
}

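/*
 * tfm init: attach to the first available NITROX device, allocate a
 * hardware crypto context and reserve per-request space for the driver's
 * request structure.
 */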
static int nitrox_skcipher_init(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
	struct crypto_ctx_hdr *chdr;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	chdr = crypto_alloc_context(nctx->ndev);
	if (!chdr) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}
	nctx->chdr = chdr;
	nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
					 sizeof(struct ctx_hdr));
	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(tfm) +
				    sizeof(struct nitrox_kcrypt_request));
	return 0;
}

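/*
 * tfm exit: wipe the cipher and auth keys held in the hardware context,
 * free the context and drop the device reference.
 */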
static void nitrox_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		memset(&fctx->crypto, 0, sizeof(struct crypto_keys));
		memset(&fctx->auth, 0, sizeof(struct auth_keys));
		crypto_free_context((void *)nctx->chdr);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}

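/*
 * common setkey: program the flexi context with the cipher type, encoded
 * AES key length and IV source, then copy the key material into it.
 */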
static inline int nitrox_skcipher_setkey(struct crypto_skcipher *cipher,
					 int aes_keylen, const u8 *key,
					 unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	enum flexi_cipher cipher_type;
	const char *name;

	name = crypto_tfm_alg_name(tfm);
	cipher_type = flexi_cipher_type(name);
	if (unlikely(cipher_type == CIPHER_INVALID)) {
		pr_err("unsupported cipher: %s\n", name);
		return -EINVAL;
	}

	/* fill crypto context */
	fctx = nctx->u.fctx;
	fctx->flags = 0;
	fctx->w0.cipher_type = cipher_type;
	fctx->w0.aes_keylen = aes_keylen;
	fctx->w0.iv_source = IV_FROM_DPTR;
	fctx->flags = cpu_to_be64(*(u64 *)&fctx->w0);
	/* copy the key to context */
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}

static int nitrox_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	int aes_keylen;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

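/*
 * completion callback from the SE request path; a non-zero completion
 * status is logged and reported to the caller as -EINVAL.
 */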
static void nitrox_skcipher_callback(struct skcipher_request *skreq,
				     int err)
{
	if (err) {
		pr_err_ratelimited("request failed status 0x%x\n", err);
		err = -EINVAL;
	}
	skcipher_request_complete(skreq, err);
}

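/*
 * translate the skcipher request into an SE crypto request: param0 carries
 * the length of the data to process, param2 the IV size (offset of the
 * payload), and the IV itself is passed along with the request
 * (IV_FROM_DPTR is selected at setkey time).
 */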
static int nitrox_skcipher_crypt(struct skcipher_request *skreq, bool enc)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(cipher);
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	struct se_crypto_request *creq;

	creq = &nkreq->creq;
	creq->flags = skreq->base.flags;
	creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		     GFP_KERNEL : GFP_ATOMIC;

	/* fill the request */
	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
	/* param0: length of the data to be encrypted */
	creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
	creq->gph.param1 = 0;
	/* param2: encryption data offset */
	creq->gph.param2 = cpu_to_be16(ivsize);
	creq->gph.param3 = 0;

	creq->ctx_handle = nctx->u.ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	/* copy the iv */
	memcpy(creq->iv, skreq->iv, ivsize);
	creq->ivsize = ivsize;
	creq->src = skreq->src;
	creq->dst = skreq->dst;

	nkreq->nctx = nctx;
	nkreq->skreq = skreq;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_skcipher_callback, skreq);
}

static int nitrox_aes_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_aes_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

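/*
 * 3DES keys are a fixed DES3_EDE_KEY_SIZE (24) bytes; no AES key length
 * encoding is needed.
 */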
static int nitrox_3des_setkey(struct crypto_skcipher *cipher,
			      const u8 *key, unsigned int keylen)
{
	if (keylen != DES3_EDE_KEY_SIZE) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	return nitrox_skcipher_setkey(cipher, 0, key, keylen);
}

static int nitrox_3des_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_3des_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

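/*
 * XTS keys are the concatenation of two equal-size AES keys; the second
 * half (KEY2) is stored in the auth key area of the flexi context and the
 * first half goes through the common setkey path.
 */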
static int nitrox_aes_xts_setkey(struct crypto_skcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen, ret;

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	keylen /= 2;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	fctx = nctx->u.fctx;
	/* copy KEY2 */
	memcpy(fctx->auth.u.key2, (key + keylen), keylen);

	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

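/*
 * RFC3686 keys carry a 4-byte nonce after the AES key; the nonce is kept
 * as the leading bytes of the context IV and the remaining bytes are
 * programmed as the AES key.
 */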
static int nitrox_aes_ctr_rfc3686_setkey(struct crypto_skcipher *cipher,
					 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	fctx = nctx->u.fctx;

	memcpy(fctx->crypto.iv, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

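/* skcipher algorithms serviced by this driver; driver names use the "n5_" prefix */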
static struct skcipher_alg nitrox_skciphers[] = { {
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "n5_cbc(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "n5_ecb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cfb(aes)",
		.cra_driver_name = "n5_cfb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "xts(aes)",
		.cra_driver_name = "n5_xts(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_xts_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "rfc3686(ctr(aes))",
		.cra_driver_name = "n5_rfc3686(ctr(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.ivsize = CTR_RFC3686_IV_SIZE,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
	.setkey = nitrox_aes_ctr_rfc3686_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
}, {
	.base = {
		.cra_name = "cts(cbc(aes))",
		.cra_driver_name = "n5_cts(cbc(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "n5_cbc(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "n5_ecb(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}
};

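/* module-level registration of the skcipher algorithms with the crypto API */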
int nitrox_crypto_register(void)
{
	return crypto_register_skciphers(nitrox_skciphers,
					 ARRAY_SIZE(nitrox_skciphers));
}

void nitrox_crypto_unregister(void)
{
	crypto_unregister_skciphers(nitrox_skciphers,
				    ARRAY_SIZE(nitrox_skciphers));
}