// SPDX-License-Identifier: GPL-2.0
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/printk.h>

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

struct nitrox_cipher {
	const char *name;
	enum flexi_cipher value;
};

/*
 * supported cipher list
 */
static const struct nitrox_cipher flexi_cipher_table[] = {
	{ "null", CIPHER_NULL },
	{ "cbc(des3_ede)", CIPHER_3DES_CBC },
	{ "ecb(des3_ede)", CIPHER_3DES_ECB },
	{ "cbc(aes)", CIPHER_AES_CBC },
	{ "ecb(aes)", CIPHER_AES_ECB },
	{ "cfb(aes)", CIPHER_AES_CFB },
	{ "rfc3686(ctr(aes))", CIPHER_AES_CTR },
	{ "xts(aes)", CIPHER_AES_XTS },
	{ "cts(cbc(aes))", CIPHER_AES_CBC_CTS },
	{ NULL, CIPHER_INVALID }
};

static enum flexi_cipher flexi_cipher_type(const char *name)
{
	const struct nitrox_cipher *cipher = flexi_cipher_table;

	while (cipher->name) {
		if (!strcmp(cipher->name, name))
			break;
		cipher++;
	}
	return cipher->value;
}

static void free_src_sglist(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

	kfree(nkreq->src);
}

static void free_dst_sglist(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

	kfree(nkreq->dst);
}

static void nitrox_skcipher_callback(void *arg, int err)
{
	struct skcipher_request *skreq = arg;

	free_src_sglist(skreq);
	free_dst_sglist(skreq);
	if (err) {
		pr_err_ratelimited("request failed status 0x%x\n", err);
		err = -EINVAL;
	}

	skcipher_request_complete(skreq, err);
}

static void nitrox_cbc_cipher_callback(void *arg, int err)
{
	struct skcipher_request *skreq = arg;
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	unsigned int start = skreq->cryptlen - ivsize;

	if (err) {
		nitrox_skcipher_callback(arg, err);
		return;
	}

	if (nkreq->creq.ctrl.s.arg == ENCRYPT) {
		scatterwalk_map_and_copy(skreq->iv, skreq->dst, start, ivsize,
					 0);
	} else {
		if (skreq->src != skreq->dst) {
			scatterwalk_map_and_copy(skreq->iv, skreq->src, start,
						 ivsize, 0);
		} else {
			memcpy(skreq->iv, nkreq->iv_out, ivsize);
			kfree(nkreq->iv_out);
		}
	}

	nitrox_skcipher_callback(arg, err);
}
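/*
 * Transform init/exit helpers. Init binds the transform to the first
 * available NITROX device, carves a per-transform flexi crypto context
 * out of device context memory, and grows the request size so every
 * skcipher request carries a struct nitrox_kcrypt_request in its
 * private area. CBC transforms swap in the CBC-specific callback so the
 * chaining IV can be copied back on completion.
 */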
static int nitrox_skcipher_init(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
	struct crypto_ctx_hdr *chdr;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	chdr = crypto_alloc_context(nctx->ndev);
	if (!chdr) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}

	nctx->callback = nitrox_skcipher_callback;
	nctx->chdr = chdr;
	nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
					 sizeof(struct ctx_hdr));
	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(tfm) +
				    sizeof(struct nitrox_kcrypt_request));
	return 0;
}

static int nitrox_cbc_init(struct crypto_skcipher *tfm)
{
	int err;
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

	err = nitrox_skcipher_init(tfm);
	if (err)
		return err;

	nctx->callback = nitrox_cbc_cipher_callback;
	return 0;
}

static void nitrox_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
		memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
		crypto_free_context((void *)nctx->chdr);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}

static inline int nitrox_skcipher_setkey(struct crypto_skcipher *cipher,
					 int aes_keylen, const u8 *key,
					 unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	union fc_ctx_flags *flags;
	enum flexi_cipher cipher_type;
	const char *name;

	name = crypto_tfm_alg_name(tfm);
	cipher_type = flexi_cipher_type(name);
	if (unlikely(cipher_type == CIPHER_INVALID)) {
		pr_err("unsupported cipher: %s\n", name);
		return -EINVAL;
	}

	/* fill crypto context */
	fctx = nctx->u.fctx;
	flags = &fctx->flags;
	flags->f = 0;
	flags->w0.cipher_type = cipher_type;
	flags->w0.aes_keylen = aes_keylen;
	flags->w0.iv_source = IV_FROM_DPTR;
	flags->f = cpu_to_be64(*(u64 *)&flags->w0);
	/* copy the key to context */
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}

static int nitrox_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	int aes_keylen;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0)
		return -EINVAL;
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static int alloc_src_sglist(struct skcipher_request *skreq, int ivsize)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int nents = sg_nents(skreq->src) + 1;
	int ret;

	/* Allocate buffer to hold IV and input scatterlist array */
	ret = alloc_src_req_buf(nkreq, nents, ivsize);
	if (ret)
		return ret;

	nitrox_creq_copy_iv(nkreq->src, skreq->iv, ivsize);
	nitrox_creq_set_src_sg(nkreq, nents, ivsize, skreq->src,
			       skreq->cryptlen);

	return 0;
}

static int alloc_dst_sglist(struct skcipher_request *skreq, int ivsize)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int nents = sg_nents(skreq->dst) + 3;
	int ret;

	/* Allocate buffer to hold ORH, COMPLETION and output scatterlist
	 * array
	 */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, skreq->dst,
			       skreq->cryptlen);

	return 0;
}
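/*
 * Build and submit one FLEXI_CRYPTO_ENCRYPT_HMAC request. The IV is
 * carried in the data pointer (IV_FROM_DPTR), so param0 holds the
 * payload length and param2 the data offset past the IV; the ORH and
 * completion words are reserved in the destination scatterlist by
 * alloc_dst_sglist() above.
 */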
static int nitrox_skcipher_crypt(struct skcipher_request *skreq, bool enc)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(cipher);
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	struct se_crypto_request *creq;
	int ret;

	creq = &nkreq->creq;
	creq->flags = skreq->base.flags;
	creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		    GFP_KERNEL : GFP_ATOMIC;

	/* fill the request */
	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
	/* param0: length of the data to be encrypted */
	creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
	creq->gph.param1 = 0;
	/* param2: encryption data offset */
	creq->gph.param2 = cpu_to_be16(ivsize);
	creq->gph.param3 = 0;

	creq->ctx_handle = nctx->u.ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	ret = alloc_src_sglist(skreq, ivsize);
	if (ret)
		return ret;

	ret = alloc_dst_sglist(skreq, ivsize);
	if (ret) {
		free_src_sglist(skreq);
		return ret;
	}

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq, nctx->callback,
					 skreq);
}

static int nitrox_cbc_decrypt(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	gfp_t flags = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		      GFP_KERNEL : GFP_ATOMIC;
	unsigned int start = skreq->cryptlen - ivsize;

	if (skreq->src != skreq->dst)
		return nitrox_skcipher_crypt(skreq, false);

	nkreq->iv_out = kmalloc(ivsize, flags);
	if (!nkreq->iv_out)
		return -ENOMEM;

	scatterwalk_map_and_copy(nkreq->iv_out, skreq->src, start, ivsize, 0);
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_aes_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_aes_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_3des_setkey(struct crypto_skcipher *cipher,
			      const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des3_key(cipher, key) ?:
	       nitrox_skcipher_setkey(cipher, 0, key, keylen);
}

static int nitrox_3des_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_3des_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_aes_xts_setkey(struct crypto_skcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen, ret;

	ret = xts_verify_key(cipher, key, keylen);
	if (ret)
		return ret;

	keylen /= 2;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0)
		return -EINVAL;

	fctx = nctx->u.fctx;
	/* copy KEY2 */
	memcpy(fctx->auth.u.key2, (key + keylen), keylen);

	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}
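/*
 * rfc3686(ctr(aes)) keys carry a 4-byte nonce (CTR_RFC3686_NONCE_SIZE)
 * appended to the AES key. Stash the nonce in the context IV, then
 * program the remaining bytes as a regular AES key.
 */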
static int nitrox_aes_ctr_rfc3686_setkey(struct crypto_skcipher *cipher,
					 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	fctx = nctx->u.fctx;

	memcpy(fctx->crypto.iv, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0)
		return -EINVAL;
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static struct skcipher_alg nitrox_skciphers[] = { {
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "n5_cbc(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_cbc_decrypt,
	.init = nitrox_cbc_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "n5_ecb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cfb(aes)",
		.cra_driver_name = "n5_cfb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "xts(aes)",
		.cra_driver_name = "n5_xts(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_xts_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "rfc3686(ctr(aes))",
		.cra_driver_name = "n5_rfc3686(ctr(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.ivsize = CTR_RFC3686_IV_SIZE,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
	.setkey = nitrox_aes_ctr_rfc3686_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
}, {
	.base = {
		.cra_name = "cts(cbc(aes))",
		.cra_driver_name = "n5_cts(cbc(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "n5_cbc(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_cbc_decrypt,
	.init = nitrox_cbc_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "n5_ecb(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}
};

int nitrox_register_skciphers(void)
{
	return crypto_register_skciphers(nitrox_skciphers,
					 ARRAY_SIZE(nitrox_skciphers));
}

void nitrox_unregister_skciphers(void)
{
	crypto_unregister_skciphers(nitrox_skciphers,
				    ARRAY_SIZE(nitrox_skciphers));
}