#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/printk.h>

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <crypto/ctr.h>
#include <crypto/des.h>
#include <crypto/xts.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

#define PRIO 4001

struct nitrox_cipher {
	const char *name;
	enum flexi_cipher value;
};

/*
 * supported cipher list
 */
static const struct nitrox_cipher flexi_cipher_table[] = {
	{ "null",		CIPHER_NULL },
	{ "cbc(des3_ede)",	CIPHER_3DES_CBC },
	{ "ecb(des3_ede)",	CIPHER_3DES_ECB },
	{ "cbc(aes)",		CIPHER_AES_CBC },
	{ "ecb(aes)",		CIPHER_AES_ECB },
	{ "cfb(aes)",		CIPHER_AES_CFB },
	{ "rfc3686(ctr(aes))",	CIPHER_AES_CTR },
	{ "xts(aes)",		CIPHER_AES_XTS },
	{ "cts(cbc(aes))",	CIPHER_AES_CBC_CTS },
	{ NULL,			CIPHER_INVALID }
};

/* map an algorithm name to the corresponding flexi cipher type */
static enum flexi_cipher flexi_cipher_type(const char *name)
{
	const struct nitrox_cipher *cipher = flexi_cipher_table;

	while (cipher->name) {
		if (!strcmp(cipher->name, name))
			break;
		cipher++;
	}
	return cipher->value;
}

/* translate an AES key size into the aes_keylen encoding (1, 2 or 3) */
static int flexi_aes_keylen(int keylen)
{
	int aes_keylen;

	switch (keylen) {
	case AES_KEYSIZE_128:
		aes_keylen = 1;
		break;
	case AES_KEYSIZE_192:
		aes_keylen = 2;
		break;
	case AES_KEYSIZE_256:
		aes_keylen = 3;
		break;
	default:
		aes_keylen = -EINVAL;
		break;
	}
	return aes_keylen;
}

static int nitrox_skcipher_init(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);
	void *fctx;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	fctx = crypto_alloc_context(nctx->ndev);
	if (!fctx) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}
	nctx->u.ctx_handle = (uintptr_t)fctx;
	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(tfm) +
				    sizeof(struct nitrox_kcrypt_request));
	return 0;
}

static void nitrox_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(tfm);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		memset(&fctx->crypto, 0, sizeof(struct crypto_keys));
		memset(&fctx->auth, 0, sizeof(struct auth_keys));
		crypto_free_context((void *)fctx);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}

static inline int nitrox_skcipher_setkey(struct crypto_skcipher *cipher,
					 int aes_keylen, const u8 *key,
					 unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	enum flexi_cipher cipher_type;
	const char *name;

	name = crypto_tfm_alg_name(tfm);
	cipher_type = flexi_cipher_type(name);
	if (unlikely(cipher_type == CIPHER_INVALID)) {
		pr_err("unsupported cipher: %s\n", name);
		return -EINVAL;
	}

	/* fill crypto context */
	fctx = nctx->u.fctx;
	fctx->flags = 0;
	fctx->w0.cipher_type = cipher_type;
	fctx->w0.aes_keylen = aes_keylen;
	fctx->w0.iv_source = IV_FROM_DPTR;
	fctx->flags = cpu_to_be64(*(u64 *)&fctx->w0);
	/* copy the key to context */
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}
static int nitrox_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	int aes_keylen;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static void nitrox_skcipher_callback(struct skcipher_request *skreq,
				     int err)
{
	if (err) {
		pr_err_ratelimited("request failed status 0x%0x\n", err);
		err = -EINVAL;
	}
	skcipher_request_complete(skreq, err);
}

static int nitrox_skcipher_crypt(struct skcipher_request *skreq, bool enc)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(skreq);
	struct nitrox_crypto_ctx *nctx = crypto_skcipher_ctx(cipher);
	struct nitrox_kcrypt_request *nkreq = skcipher_request_ctx(skreq);
	int ivsize = crypto_skcipher_ivsize(cipher);
	struct se_crypto_request *creq;

	creq = &nkreq->creq;
	creq->flags = skreq->base.flags;
	creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		     GFP_KERNEL : GFP_ATOMIC;

	/* fill the request */
	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
	/* param0: length of the data to be encrypted */
	creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
	creq->gph.param1 = 0;
	/* param2: encryption data offset */
	creq->gph.param2 = cpu_to_be16(ivsize);
	creq->gph.param3 = 0;

	creq->ctx_handle = nctx->u.ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	/* copy the iv */
	memcpy(creq->iv, skreq->iv, ivsize);
	creq->ivsize = ivsize;
	creq->src = skreq->src;
	creq->dst = skreq->dst;

	nkreq->nctx = nctx;
	nkreq->skreq = skreq;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_skcipher_callback, skreq);
}

static int nitrox_aes_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_aes_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_3des_setkey(struct crypto_skcipher *cipher,
			      const u8 *key, unsigned int keylen)
{
	if (keylen != DES3_EDE_KEY_SIZE) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	return nitrox_skcipher_setkey(cipher, 0, key, keylen);
}

static int nitrox_3des_encrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, true);
}

static int nitrox_3des_decrypt(struct skcipher_request *skreq)
{
	return nitrox_skcipher_crypt(skreq, false);
}

static int nitrox_aes_xts_setkey(struct crypto_skcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen, ret;

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	keylen /= 2;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	fctx = nctx->u.fctx;
	/* copy KEY2 */
	memcpy(fctx->auth.u.key2, (key + keylen), keylen);

	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static int nitrox_aes_ctr_rfc3686_setkey(struct crypto_skcipher *cipher,
					 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct nitrox_crypto_ctx *nctx = crypto_tfm_ctx(tfm);
	struct flexi_crypto_context *fctx;
	int aes_keylen;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	fctx = nctx->u.fctx;

	memcpy(fctx->crypto.iv, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_skcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	return nitrox_skcipher_setkey(cipher, aes_keylen, key, keylen);
}

static struct skcipher_alg nitrox_skciphers[] = { {
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "n5_cbc(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "n5_ecb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cfb(aes)",
		.cra_driver_name = "n5_cfb(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "xts(aes)",
		.cra_driver_name = "n5_xts(aes)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = 2 * AES_MIN_KEY_SIZE,
	.max_keysize = 2 * AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_xts_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "rfc3686(ctr(aes))",
		.cra_driver_name = "n5_rfc3686(ctr(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
	.ivsize = CTR_RFC3686_IV_SIZE,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
	.setkey = nitrox_aes_ctr_rfc3686_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
}, {
	.base = {
		.cra_name = "cts(cbc(aes))",
		.cra_driver_name = "n5_cts(cbc(aes))",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.setkey = nitrox_aes_setkey,
	.encrypt = nitrox_aes_encrypt,
	.decrypt = nitrox_aes_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "n5_cbc(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
}, {
	.base = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "n5_ecb(des3_ede)",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.setkey = nitrox_3des_setkey,
	.encrypt = nitrox_3des_encrypt,
	.decrypt = nitrox_3des_decrypt,
	.init = nitrox_skcipher_init,
	.exit = nitrox_skcipher_exit,
} };

int nitrox_crypto_register(void)
{
	return crypto_register_skciphers(nitrox_skciphers,
					 ARRAY_SIZE(nitrox_skciphers));
}

void nitrox_crypto_unregister(void)
{
	crypto_unregister_skciphers(nitrox_skciphers,
				    ARRAY_SIZE(nitrox_skciphers));
}
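
/*
 * Illustrative usage sketch (not part of the driver, compiled out via
 * "#if 0"): once nitrox_crypto_register() has added these algorithms,
 * a kernel consumer reaches them through the generic skcipher API,
 * either by algorithm name ("cbc(aes)") or by driver name
 * ("n5_cbc(aes)"). The function below is a hypothetical example of that
 * flow using the standard crypto_wait_req() helper to wait for the
 * asynchronous completion; the function name, key, IV and buffer
 * contents are made up for illustration, and <linux/scatterlist.h> and
 * <linux/slab.h> would additionally be needed for sg_init_one() and
 * kzalloc().
 */
#if 0
static int nitrox_cbc_aes_usage_example(void)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	u8 key[AES_KEYSIZE_128] = { 0 };	/* example key only */
	u8 iv[AES_BLOCK_SIZE] = { 0 };		/* example IV only */
	u8 *buf;
	int err;

	/* ask the crypto API for cbc(aes); the nitrox implementation can
	 * be requested explicitly with "n5_cbc(aes)" instead
	 */
	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	buf = kzalloc(AES_BLOCK_SIZE, GFP_KERNEL);
	if (!buf) {
		err = -ENOMEM;
		goto free_tfm;
	}

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto free_buf;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto free_buf;
	}

	/* these algorithms are CRYPTO_ALG_ASYNC, so request a completion
	 * callback and wait for it with crypto_wait_req()
	 */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	sg_init_one(&sg, buf, AES_BLOCK_SIZE);
	skcipher_request_set_crypt(req, &sg, &sg, AES_BLOCK_SIZE, iv);

	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
free_buf:
	kfree(buf);
free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
#endif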