// SPDX-License-Identifier: GPL-2.0
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/printk.h>

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <crypto/ctr.h>
#include <crypto/des.h>
#include <crypto/xts.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

#define PRIO 4001

struct nitrox_cipher {
	const char *name;
	enum flexi_cipher value;
};

/* supported cipher list */
static const struct nitrox_cipher flexi_cipher_table[] = {
	{ "null",		CIPHER_NULL },
	{ "cbc(des3_ede)",	CIPHER_3DES_CBC },
	{ "ecb(des3_ede)",	CIPHER_3DES_ECB },
	{ "cbc(aes)",		CIPHER_AES_CBC },
	{ "ecb(aes)",		CIPHER_AES_ECB },
	{ "cfb(aes)",		CIPHER_AES_CFB },
	{ "rfc3686(ctr(aes))",	CIPHER_AES_CTR },
	{ "xts(aes)",		CIPHER_AES_XTS },
	{ "cts(cbc(aes))",	CIPHER_AES_CBC_CTS },
	{ NULL,			CIPHER_INVALID }
};

static enum flexi_cipher flexi_cipher_type(const char *name)
{
	const struct nitrox_cipher *cipher = flexi_cipher_table;

	while (cipher->name) {
		if (!strcmp(cipher->name, name))
			break;
		cipher++;
	}
	return cipher->value;
}

static int flexi_aes_keylen(int keylen)
{
	int aes_keylen;

	switch (keylen) {
	case AES_KEYSIZE_128:
		aes_keylen = 1;
		break;
	case AES_KEYSIZE_192:
		aes_keylen = 2;
		break;
	case AES_KEYSIZE_256:
		aes_keylen = 3;
		break;
	default:
		aes_keylen = -EINVAL;
		break;
	}
	return aes_keylen;
}
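
/*
 * Illustrative example (comment only): a 16-byte key maps to code 1
 * (AES-128) and a 32-byte key to code 3 (AES-256).  nitrox_skcipher_setkey()
 * below stores this code in w0.aes_keylen; the 3DES path simply passes 0.
 */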

static int nitrox_skcipher_init(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
	void *fctx;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	fctx = crypto_alloc_context(nctx->ndev);
	if (!fctx) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}
	nctx->u.ctx_handle = (uintptr_t)fctx;
	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(tfm) +
				    sizeof(struct nitrox_kcrypt_request));
	return 0;
}

static void nitrox_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		memset(&fctx->crypto, 0, sizeof(struct crypto_keys));
		memset(&fctx->auth, 0, sizeof(struct auth_keys));
		crypto_free_context((void *)fctx);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}

static inline int nitrox_skcipher_setkey(struct crypto_skcipher *cipher,
					 int aes_keylen, const u8 *key,
					 unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	enum flexi_cipher cipher_type;
	const char *name;

	name = crypto_tfm_alg_name(tfm);
	cipher_type = flexi_cipher_type(name);
	if (unlikely(cipher_type == CIPHER_INVALID)) {
		pr_err("unsupported cipher: %s\n", name);
		return -EINVAL;
	}

	/* fill crypto context */
	fctx = nctx->u.fctx;
	fctx->flags = 0;
	fctx->w0.cipher_type = cipher_type;
	fctx->w0.aes_keylen = aes_keylen;
	fctx->w0.iv_source = IV_FROM_DPTR;
	fctx->flags = cpu_to_be64(*(u64 *)&fctx->w0);
	/* copy the key to context */
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}

static int nitrox_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	int aes_keylen;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static void nitrox_skcipher_callback(struct skcipher_request *skreq,
				     int err)
{
	if (err) {
		pr_err_ratelimited("request failed status 0x%x\n", err);
		err = -EINVAL;
	}
	skcipher_request_complete(skreq, err);
}

static int nitrox_skcipher_crypt(struct skcipher_request *skreq, bool enc)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(cipher);
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	struct se_crypto_request *creq;

	creq = &nkreq->creq;
	creq->flags = skreq->base.flags;
	creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		     GFP_KERNEL : GFP_ATOMIC;

	/* fill the request */
	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
	/* param0: length of the data to be encrypted */
	creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
	creq->gph.param1 = 0;
	/* param2: encryption data offset */
	creq->gph.param2 = cpu_to_be16(ivsize);
	creq->gph.param3 = 0;

	creq->ctx_handle = nctx->u.ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	/* copy the iv */
	memcpy(creq->iv, skreq->iv, ivsize);
	creq->ivsize = ivsize;
	creq->src = skreq->src;
	creq->dst = skreq->dst;

	nkreq->nctx = nctx;
	nkreq->skreq = skreq;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_skcipher_callback, skreq);
}

static int nitrox_aes_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_aes_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_3des_setkey(struct crypto_skcipher *cipher,
			      const u8 *key, unsigned int keylen)
{
	if (keylen != DES3_EDE_KEY_SIZE) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	return nitrox_skcipher_setkey(cipher, 0, key, keylen);
}

static int nitrox_3des_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_3des_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_aes_xts_setkey(struct crypto_skcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen, ret;

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	keylen /= 2;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	fctx = nctx->u.fctx;
	/* copy KEY2 */
	memcpy(fctx->auth.u.key2, (key + keylen), keylen);

	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}
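
/*
 * Note on the XTS path above (comment only): the crypto API hands in
 * KEY1 || KEY2 as one blob, so after xts_check_key() the length is halved,
 * e.g. a 64-byte key becomes two 32-byte halves.  KEY2 is stored in
 * fctx->auth.u.key2 here, while KEY1 is copied into fctx->crypto.u.key by
 * nitrox_skcipher_setkey().
 */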

static int nitrox_aes_ctr_rfc3686_setkey(struct crypto_skcipher *cipher,
					 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	fctx = nctx->u.fctx;

	memcpy(fctx->crypto.iv, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static struct skcipher_alg nitrox_skciphers[] = { {
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "n5_cbc(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "n5_ecb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cfb(aes)",
		.cra_driver_name = "n5_cfb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "xts(aes)",
		.cra_driver_name = "n5_xts(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_xts_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "rfc3686(ctr(aes))",
		.cra_driver_name = "n5_rfc3686(ctr(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.ivsize = CTR_RFC3686_IV_SIZE,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
	.setkey = nitrox_aes_ctr_rfc3686_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
}, {
	.base = {
		.cra_name = "cts(cbc(aes))",
		.cra_driver_name = "n5_cts(cbc(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "n5_cbc(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "n5_ecb(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
} };
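
/*
 * Note (general crypto API behaviour, not specific to this file): when a
 * caller requests a generic name such as "cbc(aes)", the registered
 * implementation with the highest cra_priority is selected, so PRIO 4001
 * lets these "n5_*" entries take precedence over the software ciphers once
 * a NITROX device has been probed.
 */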

int nitrox_crypto_register(void)
{
	return crypto_register_skciphers(nitrox_skciphers,
					 ARRAY_SIZE(nitrox_skciphers));
}

void nitrox_crypto_unregister(void)
{
	crypto_unregister_skciphers(nitrox_skciphers,
				    ARRAY_SIZE(nitrox_skciphers));
}
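
/*
 * Illustrative usage sketch (comment only, not part of this driver): a
 * kernel caller reaches these algorithms through the generic skcipher API.
 * "key", "src", "dst", "len" and "iv" below are hypothetical; src/dst are
 * scatterlists, and error handling is omitted for brevity.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, src, dst, len, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */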