// SPDX-License-Identifier: GPL-2.0
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/printk.h>

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <crypto/ctr.h>
#include <crypto/des.h>
#include <crypto/xts.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

struct nitrox_cipher {
	const char *name;
	enum flexi_cipher value;
};

/* supported cipher list */
static const struct nitrox_cipher flexi_cipher_table[] = {
	{ "null",		CIPHER_NULL },
	{ "cbc(des3_ede)",	CIPHER_3DES_CBC },
	{ "ecb(des3_ede)",	CIPHER_3DES_ECB },
	{ "cbc(aes)",		CIPHER_AES_CBC },
	{ "ecb(aes)",		CIPHER_AES_ECB },
	{ "cfb(aes)",		CIPHER_AES_CFB },
	{ "rfc3686(ctr(aes))",	CIPHER_AES_CTR },
	{ "xts(aes)",		CIPHER_AES_XTS },
	{ "cts(cbc(aes))",	CIPHER_AES_CBC_CTS },
	{ NULL,			CIPHER_INVALID }
};

static enum flexi_cipher flexi_cipher_type(const char *name)
{
	const struct nitrox_cipher *cipher = flexi_cipher_table;

	while (cipher->name) {
		if (!strcmp(cipher->name, name))
			break;
		cipher++;
	}
	return cipher->value;
}

static int nitrox_skcipher_init(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
	struct crypto_ctx_hdr *chdr;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	chdr = crypto_alloc_context(nctx->ndev);
	if (!chdr) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}
	nctx->chdr = chdr;
	nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
					 sizeof(struct ctx_hdr));
	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(tfm) +
				    sizeof(struct nitrox_kcrypt_request));
	return 0;
}

static void nitrox_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
		memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
		crypto_free_context((void *)nctx->chdr);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}

static inline int nitrox_skcipher_setkey(struct crypto_skcipher *cipher,
					 int aes_keylen, const u8 *key,
					 unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	union fc_ctx_flags *flags;
	enum flexi_cipher cipher_type;
	const char *name;

	name = crypto_tfm_alg_name(tfm);
	cipher_type = flexi_cipher_type(name);
	if (unlikely(cipher_type == CIPHER_INVALID)) {
		pr_err("unsupported cipher: %s\n", name);
		return -EINVAL;
	}

	/* fill crypto context */
	fctx = nctx->u.fctx;
	flags = &fctx->flags;
	flags->f = 0;
	flags->w0.cipher_type = cipher_type;
	flags->w0.aes_keylen = aes_keylen;
	flags->w0.iv_source = IV_FROM_DPTR;
	flags->f = cpu_to_be64(*(u64 *)&flags->w0);
	/* copy the key to context */
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}

static int nitrox_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	int aes_keylen;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}
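/*
 * Note: flexi_aes_keylen() is provided by nitrox_req.h and is assumed to
 * translate a standard AES key size (16, 24 or 32 bytes, i.e.
 * AES-128/192/256) into the hardware's aes_keylen encoding, returning a
 * negative value for any other size. The AES setkey handlers in this
 * file rely on that negative return to reject bad keys with -EINVAL and
 * CRYPTO_TFM_RES_BAD_KEY_LEN.
 */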
static int alloc_src_sglist(struct skcipher_request *skreq, int ivsize)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int nents = sg_nents(skreq->src) + 1;
	int ret;

	/* Allocate buffer to hold IV and input scatterlist array */
	ret = alloc_src_req_buf(nkreq, nents, ivsize);
	if (ret)
		return ret;

	nitrox_creq_copy_iv(nkreq->src, skreq->iv, ivsize);
	nitrox_creq_set_src_sg(nkreq, nents, ivsize, skreq->src,
			       skreq->cryptlen);

	return 0;
}

static int alloc_dst_sglist(struct skcipher_request *skreq, int ivsize)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int nents = sg_nents(skreq->dst) + 3;
	int ret;

	/* Allocate buffer to hold ORH, COMPLETION and output scatterlist
	 * array
	 */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, skreq->dst,
			       skreq->cryptlen);

	return 0;
}

static void free_src_sglist(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

	kfree(nkreq->src);
}

static void free_dst_sglist(struct skcipher_request *skreq)
{
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);

	kfree(nkreq->dst);
}

static void nitrox_skcipher_callback(void *arg, int err)
{
	struct skcipher_request *skreq = arg;

	free_src_sglist(skreq);
	free_dst_sglist(skreq);
	if (err) {
		pr_err_ratelimited("request failed status 0x%x\n", err);
		err = -EINVAL;
	}

	skcipher_request_complete(skreq, err);
}
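/*
 * Sketch of the request buffer layout built by the helpers above,
 * inferred from the nents adjustments and the alloc/set helpers in
 * nitrox_req.h:
 *
 *   src buffer:  [ IV ][ sglist for skreq->src ]       (sg_nents(src) + 1)
 *   dst buffer:  [ ORH ][ COMPLETION ][ sglist ... ]   (sg_nents(dst) + 3)
 *
 * The source side prepends the IV (IV_FROM_DPTR in the context flags
 * tells the engine to fetch it from the data pointer); the destination
 * side reserves entries for the ORH and COMPLETION words and, it
 * appears, the IV region as well, since nitrox_creq_set_dst_sg() is
 * also passed ivsize.
 */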
static int nitrox_skcipher_crypt(struct skcipher_request *skreq, bool enc)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(cipher);
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	struct se_crypto_request *creq;
	int ret;

	creq = &nkreq->creq;
	creq->flags = skreq->base.flags;
	creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		     GFP_KERNEL : GFP_ATOMIC;

	/* fill the request */
	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
	/* param0: length of the data to be encrypted */
	creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
	creq->gph.param1 = 0;
	/* param2: encryption data offset */
	creq->gph.param2 = cpu_to_be16(ivsize);
	creq->gph.param3 = 0;

	creq->ctx_handle = nctx->u.ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	ret = alloc_src_sglist(skreq, ivsize);
	if (ret)
		return ret;

	ret = alloc_dst_sglist(skreq, ivsize);
	if (ret) {
		free_src_sglist(skreq);
		return ret;
	}

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_skcipher_callback, skreq);
}

static int nitrox_aes_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_aes_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_3des_setkey(struct crypto_skcipher *cipher,
			      const u8 *key, unsigned int keylen)
{
	return unlikely(des3_verify_key(cipher, key)) ?:
	       nitrox_skcipher_setkey(cipher, 0, key, keylen);
}

static int nitrox_3des_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_3des_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_aes_xts_setkey(struct crypto_skcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen, ret;

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	keylen /= 2;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	fctx = nctx->u.fctx;
	/* copy KEY2 */
	memcpy(fctx->auth.u.key2, (key + keylen), keylen);

	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static int nitrox_aes_ctr_rfc3686_setkey(struct crypto_skcipher *cipher,
					 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	fctx = nctx->u.fctx;

	memcpy(fctx->crypto.iv, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}
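/*
 * Algorithm registration table. Each entry below is exposed through the
 * kernel crypto API as an asynchronous skcipher. A consumer selects one
 * by cra_name in the usual way, e.g. (generic kernel crypto API usage,
 * not specific to this driver):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_skcipher(tfm);
 *
 * The "n5_" driver names allow requesting this hardware implementation
 * explicitly, and PRIO decides whether it is preferred over other
 * implementations of the same cra_name.
 */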
static struct skcipher_alg nitrox_skciphers[] = { {
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "n5_cbc(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "n5_ecb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cfb(aes)",
		.cra_driver_name = "n5_cfb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "xts(aes)",
		.cra_driver_name = "n5_xts(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_xts_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "rfc3686(ctr(aes))",
		.cra_driver_name = "n5_rfc3686(ctr(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.ivsize = CTR_RFC3686_IV_SIZE,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
	.setkey = nitrox_aes_ctr_rfc3686_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
}, {
	.base = {
		.cra_name = "cts(cbc(aes))",
		.cra_driver_name = "n5_cts(cbc(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "n5_cbc(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "n5_ecb(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}

};
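/*
 * Registration entry points, called during driver init/teardown.
 * crypto_register_skciphers() registers the whole table and unwinds on
 * failure (entries registered before the error are unregistered again),
 * so a non-zero return here leaves no algorithms behind.
 */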
int nitrox_register_skciphers(void)
{
	return crypto_register_skciphers(nitrox_skciphers,
					 ARRAY_SIZE(nitrox_skciphers));
}

void nitrox_unregister_skciphers(void)
{
	crypto_unregister_skciphers(nitrox_skciphers,
				    ARRAY_SIZE(nitrox_skciphers));
}