// SPDX-License-Identifier: GPL-2.0
#include <linux/kernel.h>
#include <linux/printk.h>
#include <linux/crypto.h>
#include <linux/rtnetlink.h>

#include <crypto/aead.h>
#include <crypto/authenc.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/internal/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/gcm.h>

#include "nitrox_dev.h"
#include "nitrox_common.h"
#include "nitrox_req.h"

#define GCM_AES_SALT_SIZE	4

union gph_p3 {
	struct {
#ifdef __BIG_ENDIAN_BITFIELD
		u16 iv_offset : 8;
		u16 auth_offset : 8;
#else
		u16 auth_offset : 8;
		u16 iv_offset : 8;
#endif
	};
	u16 param;
};

static int nitrox_aes_gcm_setkey(struct crypto_aead *aead, const u8 *key,
				 unsigned int keylen)
{
	int aes_keylen;
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct flexi_crypto_context *fctx;
	union fc_ctx_flags flags;

	aes_keylen = flexi_aes_keylen(keylen);
	if (aes_keylen < 0) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* fill crypto context */
	fctx = nctx->u.fctx;
	flags.f = be64_to_cpu(fctx->flags.f);
	flags.w0.aes_keylen = aes_keylen;
	fctx->flags.f = cpu_to_be64(flags.f);

	/* copy enc key to context */
	memset(&fctx->crypto, 0, sizeof(fctx->crypto));
	memcpy(fctx->crypto.u.key, key, keylen);

	return 0;
}

static int nitrox_aead_setauthsize(struct crypto_aead *aead,
				   unsigned int authsize)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	union fc_ctx_flags flags;

	flags.f = be64_to_cpu(fctx->flags.f);
	flags.w0.mac_len = authsize;
	fctx->flags.f = cpu_to_be64(flags.f);

	aead->authsize = authsize;

	return 0;
}

static int nitrox_aes_gcm_setauthsize(struct crypto_aead *aead,
				      unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return nitrox_aead_setauthsize(aead, authsize);
}

static int alloc_src_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *src, char *iv, int ivsize,
			    int buflen)
{
	int nents = sg_nents_for_len(src, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* IV entry */
	nents += 1;
	/* Allocate buffer to hold IV and input scatterlist array */
	ret = alloc_src_req_buf(nkreq, nents, ivsize);
	if (ret)
		return ret;

	nitrox_creq_copy_iv(nkreq->src, iv, ivsize);
	nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen);

	return 0;
}

static int alloc_dst_sglist(struct nitrox_kcrypt_request *nkreq,
			    struct scatterlist *dst, int ivsize, int buflen)
{
	int nents = sg_nents_for_len(dst, buflen);
	int ret;

	if (nents < 0)
		return nents;

	/* IV, ORH, COMPLETION entries */
	nents += 3;
	/* Allocate buffer to hold ORH, COMPLETION and output scatterlist
	 * array
	 */
	ret = alloc_dst_req_buf(nkreq, nents);
	if (ret)
		return ret;

	nitrox_creq_set_orh(nkreq);
	nitrox_creq_set_comp(nkreq);
	nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen);

	return 0;
}

static void free_src_sglist(struct nitrox_kcrypt_request *nkreq)
{
	kfree(nkreq->src);
}

static void free_dst_sglist(struct nitrox_kcrypt_request *nkreq)
{
	kfree(nkreq->dst);
}
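
/*
 * nitrox_set_creq() below fills the generic protocol header (GPH) words
 * consumed by the SE microcode.  A sketch of the layout as this driver
 * uses it (an inference from the assignments that follow, not from
 * hardware documentation):
 *
 *	param0 = cryptlen			bytes to encrypt/decrypt
 *	param1 = cryptlen + assoclen		bytes covered by the MAC
 *	param2 = ivsize + assoclen		payload offset in the input
 *	param3 = { iv_offset = 0, auth_offset = ivsize }
 *
 * This matches alloc_src_sglist() prepending a single IV entry, so the
 * device sees its input as [IV][AAD][payload].
 */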

static int nitrox_set_creq(struct nitrox_aead_rctx *rctx)
{
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	union gph_p3 param3;
	int ret;

	creq->flags = rctx->flags;
	creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
							       GFP_ATOMIC;

	creq->ctrl.value = 0;
	creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
	creq->ctrl.s.arg = rctx->ctrl_arg;

	creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
	creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
	creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
	param3.iv_offset = 0;
	param3.auth_offset = rctx->ivsize;
	creq->gph.param3 = cpu_to_be16(param3.param);

	creq->ctx_handle = rctx->ctx_handle;
	creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);

	ret = alloc_src_sglist(&rctx->nkreq, rctx->src, rctx->iv, rctx->ivsize,
			       rctx->srclen);
	if (ret)
		return ret;

	ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize,
			       rctx->dstlen);
	if (ret) {
		free_src_sglist(&rctx->nkreq);
		return ret;
	}

	return 0;
}

static void nitrox_aead_callback(void *arg, int err)
{
	struct aead_request *areq = arg;
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);

	free_src_sglist(&rctx->nkreq);
	free_dst_sglist(&rctx->nkreq);
	if (err) {
		pr_err_ratelimited("request failed status 0x%0x\n", err);
		err = -EINVAL;
	}

	areq->base.complete(&areq->base, err);
}

static inline bool nitrox_aes_gcm_assoclen_supported(unsigned int assoclen)
{
	if (assoclen <= 512)
		return true;

	return false;
}

static int nitrox_aes_gcm_enc(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))
		return -EINVAL;

	memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

	rctx->cryptlen = areq->cryptlen;
	rctx->assoclen = areq->assoclen;
	rctx->srclen = areq->assoclen + areq->cryptlen;
	rctx->dstlen = rctx->srclen + aead->authsize;
	rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
	rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
	rctx->flags = areq->base.flags;
	rctx->ctx_handle = nctx->u.ctx_handle;
	rctx->src = areq->src;
	rctx->dst = areq->dst;
	rctx->ctrl_arg = ENCRYPT;
	ret = nitrox_set_creq(rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
					 areq);
}

static int nitrox_aes_gcm_dec(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
	struct se_crypto_request *creq = &rctx->nkreq.creq;
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	if (!nitrox_aes_gcm_assoclen_supported(areq->assoclen))
		return -EINVAL;

	memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE);

	rctx->cryptlen = areq->cryptlen - aead->authsize;
	rctx->assoclen = areq->assoclen;
	rctx->srclen = areq->cryptlen + areq->assoclen;
	rctx->dstlen = rctx->srclen - aead->authsize;
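	/*
	 * The leading GCM_AES_SALT_SIZE bytes of the 12-byte GCM IV were
	 * programmed into the context as the salt above; only the
	 * remaining 8 bytes accompany each request.
	 */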
	rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
	rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
	rctx->flags = areq->base.flags;
	rctx->ctx_handle = nctx->u.ctx_handle;
	rctx->src = areq->src;
	rctx->dst = areq->dst;
	rctx->ctrl_arg = DECRYPT;
	ret = nitrox_set_creq(rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
					 areq);
}

static int nitrox_aead_init(struct crypto_aead *aead)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct crypto_ctx_hdr *chdr;

	/* get the first device */
	nctx->ndev = nitrox_get_first_device();
	if (!nctx->ndev)
		return -ENODEV;

	/* allocate nitrox crypto context */
	chdr = crypto_alloc_context(nctx->ndev);
	if (!chdr) {
		nitrox_put_device(nctx->ndev);
		return -ENOMEM;
	}
	nctx->chdr = chdr;
	nctx->u.ctx_handle = (uintptr_t)((u8 *)chdr->vaddr +
					 sizeof(struct ctx_hdr));
	nctx->u.fctx->flags.f = 0;

	return 0;
}

static int nitrox_gcm_common_init(struct crypto_aead *aead)
{
	int ret;
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	union fc_ctx_flags *flags;

	ret = nitrox_aead_init(aead);
	if (ret)
		return ret;

	flags = &nctx->u.fctx->flags;
	flags->w0.cipher_type = CIPHER_AES_GCM;
	flags->w0.hash_type = AUTH_NULL;
	flags->w0.iv_source = IV_FROM_DPTR;
	/* ask microcode to calculate ipad/opad */
	flags->w0.auth_input_type = 1;
	flags->f = be64_to_cpu(flags->f);

	return 0;
}

static int nitrox_aes_gcm_init(struct crypto_aead *aead)
{
	int ret;

	ret = nitrox_gcm_common_init(aead);
	if (ret)
		return ret;

	crypto_aead_set_reqsize(aead,
				sizeof(struct aead_request) +
				sizeof(struct nitrox_aead_rctx));

	return 0;
}

static void nitrox_aead_exit(struct crypto_aead *aead)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);

	/* free the nitrox crypto context */
	if (nctx->u.ctx_handle) {
		struct flexi_crypto_context *fctx = nctx->u.fctx;

		memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));
		memzero_explicit(&fctx->auth, sizeof(struct auth_keys));
		crypto_free_context((void *)nctx->chdr);
	}
	nitrox_put_device(nctx->ndev);

	nctx->u.ctx_handle = 0;
	nctx->ndev = NULL;
}

static int nitrox_rfc4106_setkey(struct crypto_aead *aead, const u8 *key,
				 unsigned int keylen)
{
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct flexi_crypto_context *fctx = nctx->u.fctx;
	int ret;

	if (keylen < GCM_AES_SALT_SIZE)
		return -EINVAL;

	keylen -= GCM_AES_SALT_SIZE;
	ret = nitrox_aes_gcm_setkey(aead, key, keylen);
	if (ret)
		return ret;

	memcpy(fctx->crypto.iv, key + keylen, GCM_AES_SALT_SIZE);
	return 0;
}

static int nitrox_rfc4106_setauthsize(struct crypto_aead *aead,
				      unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return nitrox_aead_setauthsize(aead, authsize);
}

static int nitrox_rfc4106_set_aead_rctx_sglist(struct aead_request *areq)
{
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	unsigned int assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	struct scatterlist *sg;
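
	/*
	 * areq->assoclen still includes the 8-byte RFC 4106 nonce, so 16
	 * and 20 correspond to 8 or 12 bytes of actual AAD -- presumably
	 * the IPsec ESP cases (SPI plus 32-bit or 64-bit sequence number).
	 */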

	if (areq->assoclen != 16 && areq->assoclen != 20)
		return -EINVAL;

	scatterwalk_map_and_copy(rctx->assoc, areq->src, 0, assoclen, 0);
	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, rctx->assoc, assoclen);
	sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (areq->src != areq->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, rctx->assoc, assoclen);
		sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_rctx->src = rctx->src;
	aead_rctx->dst = (areq->src == areq->dst) ? rctx->src : rctx->dst;

	return 0;
}

static void nitrox_rfc4106_callback(void *arg, int err)
{
	struct aead_request *areq = arg;
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_kcrypt_request *nkreq = &rctx->base.nkreq;

	free_src_sglist(nkreq);
	free_dst_sglist(nkreq);
	if (err) {
		pr_err_ratelimited("request failed status 0x%0x\n", err);
		err = -EINVAL;
	}

	areq->base.complete(&areq->base, err);
}

static int nitrox_rfc4106_enc(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
	int ret;

	aead_rctx->cryptlen = areq->cryptlen;
	aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;
	aead_rctx->dstlen = aead_rctx->srclen + aead->authsize;
	aead_rctx->iv = areq->iv;
	aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
	aead_rctx->flags = areq->base.flags;
	aead_rctx->ctx_handle = nctx->u.ctx_handle;
	aead_rctx->ctrl_arg = ENCRYPT;

	ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
	if (ret)
		return ret;

	ret = nitrox_set_creq(aead_rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_rfc4106_callback, areq);
}

static int nitrox_rfc4106_dec(struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct nitrox_crypto_ctx *nctx = crypto_aead_ctx(aead);
	struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
	struct nitrox_aead_rctx *aead_rctx = &rctx->base;
	struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
	int ret;

	aead_rctx->cryptlen = areq->cryptlen - aead->authsize;
	aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
	aead_rctx->srclen =
		areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;
	aead_rctx->dstlen = aead_rctx->srclen - aead->authsize;
	aead_rctx->iv = areq->iv;
	aead_rctx->ivsize = GCM_RFC4106_IV_SIZE;
	aead_rctx->flags = areq->base.flags;
	aead_rctx->ctx_handle = nctx->u.ctx_handle;
	aead_rctx->ctrl_arg = DECRYPT;

	ret = nitrox_rfc4106_set_aead_rctx_sglist(areq);
	if (ret)
		return ret;

	ret = nitrox_set_creq(aead_rctx);
	if (ret)
		return ret;

	/* send the crypto request */
	return nitrox_process_se_request(nctx->ndev, creq,
					 nitrox_rfc4106_callback, areq);
}
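
/*
 * The rfc4106 variant needs a larger request context than plain gcm(aes):
 * nitrox_rfc4106_rctx embeds nitrox_aead_rctx and additionally carries the
 * copied-out AAD buffer plus the three-entry src/dst scatterlist arrays
 * built by nitrox_rfc4106_set_aead_rctx_sglist(), hence the larger reqsize
 * set below.
 */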

static int nitrox_rfc4106_init(struct crypto_aead *aead)
{
	int ret;

	ret = nitrox_gcm_common_init(aead);
	if (ret)
		return ret;

	crypto_aead_set_reqsize(aead, sizeof(struct aead_request) +
				sizeof(struct nitrox_rfc4106_rctx));

	return 0;
}

static struct aead_alg nitrox_aeads[] = { {
	.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "n5_aes_gcm",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.setkey = nitrox_aes_gcm_setkey,
	.setauthsize = nitrox_aes_gcm_setauthsize,
	.encrypt = nitrox_aes_gcm_enc,
	.decrypt = nitrox_aes_gcm_dec,
	.init = nitrox_aes_gcm_init,
	.exit = nitrox_aead_exit,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
}, {
	.base = {
		.cra_name = "rfc4106(gcm(aes))",
		.cra_driver_name = "n5_rfc4106",
		.cra_priority = PRIO,
		.cra_flags = CRYPTO_ALG_ASYNC,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct nitrox_crypto_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.setkey = nitrox_rfc4106_setkey,
	.setauthsize = nitrox_rfc4106_setauthsize,
	.encrypt = nitrox_rfc4106_enc,
	.decrypt = nitrox_rfc4106_dec,
	.init = nitrox_rfc4106_init,
	.exit = nitrox_aead_exit,
	.ivsize = GCM_RFC4106_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
} };

int nitrox_register_aeads(void)
{
	return crypto_register_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}

void nitrox_unregister_aeads(void)
{
	crypto_unregister_aeads(nitrox_aeads, ARRAY_SIZE(nitrox_aeads));
}
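
/*
 * Usage sketch (not part of this driver): once nitrox_register_aeads()
 * succeeds, any kernel user can reach these implementations through the
 * generic AEAD API.  Roughly, with error handling elided and the buffer
 * names purely illustrative:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_aead_setkey(tfm, key, 16);	// AES-128
 *	crypto_aead_setauthsize(tfm, 16);	// full 16-byte tag
 *	// src_sg: assoclen bytes of AAD followed by the plaintext;
 *	// dst_sg must have room for cryptlen + 16 bytes of output
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	ret = crypto_aead_encrypt(req);	// async: may return -EINPROGRESS
 */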