// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_SKCIPHER_SEQ_LEN 6

#define template_skcipher template_u.skcipher

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
	u8 slot;
	enum cc_cpp_alg alg;
};

enum cc_key_type {
	CC_UNPROTECTED_KEY,		/* User key */
	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
	CC_POLICY_PROTECTED_KEY,	/* CPP key */
	CC_INVALID_PROTECTED_KEY	/* Invalid key */
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	enum cc_key_type key_type;
	struct cc_user_key_info user;
	union {
		struct cc_hw_key_info hw;
		struct cc_cpp_key_info cpp;
	};
	struct crypto_shash *shash_tfm;
	struct crypto_skcipher *fallback_tfm;
	bool fallback_on;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->key_type;
}

static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	case S_DIN_to_SM4:
		if (size == SM4_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	case S_DIN_to_SM4:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	default:
		break;
	}
	return -EINVAL;
}
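
/* Per-transform init: cache the algorithm's mode/flow settings and allocate
 * the DMA-mapped key buffer; for ESSIV also allocate the SHA-256 shash and,
 * if available, a software fallback skcipher for unsupported key sizes.
 */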
static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	unsigned int fallback_req_size = 0;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		const char *name = crypto_tfm_alg_name(tfm);

		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
		max_key_buf_size <<= 1;

		/* Alloc fallback tfm for essiv when key size != 256 bit */
		ctx_p->fallback_tfm =
			crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);

		if (IS_ERR(ctx_p->fallback_tfm)) {
			/* Note we're still allowing registration with no fallback since it's
			 * better to have most modes supported than none at all.
			 */
			dev_warn(dev, "Error allocating fallback algo %s. Some modes may be available.\n",
				 name);
			ctx_p->fallback_tfm = NULL;
		} else {
			fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm);
		}
	}

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx) + fallback_req_size);

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		goto free_fallback;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		goto free_key;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	return 0;

free_key:
	kfree(ctx_p->user.key);
free_fallback:
	crypto_free_skcipher(ctx_p->fallback_tfm);
	crypto_free_shash(ctx_p->shash_tfm);

	return -ENOMEM;
}
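
/* Per-transform teardown: releases the ESSIV helpers (if any) and unmaps and
 * frees the key buffer allocated by cc_cipher_init().
 */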
static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
			container_of(alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
		crypto_free_skcipher(ctx_p->fallback_tfm);
		ctx_p->fallback_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
	kfree_sensitive(ctx_p->user.key);
}

struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};

static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
		return CC_HW_PROTECTED_KEY;
	else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
		 slot_num <= CC_LAST_CPP_KEY_SLOT)
		return CC_POLICY_PROTECTED_KEY;
	else
		return CC_INVALID_PROTECTED_KEY;
}
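
/* Set a protected key: the "key" blob is a struct cc_hkey_info token that
 * names a HW (FDE) or CPP key slot rather than carrying key material.
 */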
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the protected key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
		return -EINVAL;
	}

	ctx_p->keylen = keylen;
	ctx_p->fallback_on = false;

	switch (cc_slot_to_key_type(hki.hw_key1)) {
	case CC_HW_PROTECTED_KEY:
		if (ctx_p->flow_mode == S_DIN_to_SM4) {
			dev_err(dev, "Only AES HW protected keys are supported\n");
			return -EINVAL;
		}

		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key1 number (%d)\n",
				hki.hw_key1);
			return -EINVAL;
		}

		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
			if (hki.hw_key1 == hki.hw_key2) {
				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
					hki.hw_key1, hki.hw_key2);
				return -EINVAL;
			}

			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
				dev_err(dev, "Unsupported hw key2 number (%d)\n",
					hki.hw_key2);
				return -EINVAL;
			}
		}

		ctx_p->key_type = CC_HW_PROTECTED_KEY;
		dev_dbg(dev, "HW protected key %d/%d set.\n",
			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
		break;

	case CC_POLICY_PROTECTED_KEY:
		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
			return -EINVAL;
		}

		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
			return -EINVAL;
		}

		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
		if (ctx_p->flow_mode == S_DIN_to_AES)
			ctx_p->cpp.alg = CC_CPP_AES;
		else /* Must be SM4 due to sethkey registration */
			ctx_p->cpp.alg = CC_CPP_SM4;
		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
			ctx_p->cpp.alg, ctx_p->cpp.slot);
		break;

	default:
		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	return 0;
}
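
/* Set a regular user key: the key is copied into the DMA-mapped context
 * buffer and, for ESSIV, its SHA-256 digest is appended as the tweak key.
 */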
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_dbg(dev, "Invalid key size %d.\n", keylen);
		return -EINVAL;
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {

		/* We only support 256 bit ESSIV-CBC-AES keys */
		if (keylen != AES_KEYSIZE_256) {
			unsigned int flags = crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_MASK;

			if (likely(ctx_p->fallback_tfm)) {
				ctx_p->fallback_on = true;
				crypto_skcipher_clear_flags(ctx_p->fallback_tfm,
							    CRYPTO_TFM_REQ_MASK);
				crypto_skcipher_clear_flags(ctx_p->fallback_tfm, flags);
				return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen);
			}

			dev_dbg(dev, "Unsupported key size %d and no fallback.\n", keylen);
			return -EINVAL;
		}

		/* Internal ESSIV key buffer is double sized */
		max_key_buf_size <<= 1;
	}

	ctx_p->fallback_on = false;
	ctx_p->key_type = CC_UNPROTECTED_KEY;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if ((keylen == DES3_EDE_KEY_SIZE &&
		     verify_skcipher_des3_key(sktfm, key)) ||
		    verify_skcipher_des_key(sktfm, key)) {
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int err;

		err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
					      ctx_p->user.key, keylen,
					      ctx_p->user.key + keylen);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}

		keylen <<= 1;
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return S_AES_to_DOUT;
	case S_DIN_to_DES:
		return S_DES_to_DOUT;
	case S_DIN_to_SM4:
		return S_SM4_to_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}
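
/* Queue a descriptor that writes the updated IV/counter back to the request
 * context after the data pass, so the next request can chain from it.
 */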
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = cc_out_setup_mode(ctx_p);
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
		return;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Read next IV */
		hw_desc_init(&desc[*seq_size]);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
		}
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
			      NS_BIT, 1);
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static void cc_setup_state_desc(struct crypto_tfm *tfm,
				struct cipher_req_ctx *req_ctx,
				unsigned int ivsize, unsigned int nbytes,
				struct cc_hw_desc desc[],
				unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}
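
/* For XTS/ESSIV/BITLOCKER modes: load the second (XEX/tweak) key and the IV.
 * For ESSIV the tweak key is the SHA-256 digest stored after the user key.
 */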
static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
				    struct cipher_req_ctx *req_ctx,
				    unsigned int ivsize, unsigned int nbytes,
				    struct cc_hw_desc desc[],
				    unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = (ctx_p->keylen / 2);
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;
	unsigned int key_offset = key_len;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:

		if (cipher_mode == DRV_CIPHER_ESSIV)
			key_len = SHA256_DIGEST_SIZE;

		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + key_offset),
				     key_len, NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], key_len);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], key_len);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return DIN_AES_DOUT;
	case S_DIN_to_DES:
		return DIN_DES_DOUT;
	case S_DIN_to_SM4:
		return DIN_SM4_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}
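
/* Load KEY0 from the appropriate source: the DMA-mapped user key, a HW (FDE)
 * key slot or a CPP key slot, depending on the key type set at setkey time.
 */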
static void cc_setup_key_desc(struct crypto_tfm *tfm,
			      struct cipher_req_ctx *req_ctx,
			      unsigned int nbytes, struct cc_hw_desc desc[],
			      unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	unsigned int din_size;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);

		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
			/* We use the AES key size coding for all CPP algs */
			set_key_size_aes(&desc[*seq_size], key_len);
			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
			flow_mode = cc_out_flow_mode(ctx_p);
		} else {
			if (flow_mode == S_DIN_to_AES) {
				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
					set_hw_crypto_key(&desc[*seq_size],
							  ctx_p->hw.key1_slot);
				} else {
					/* CC_UNPROTECTED_KEY
					 * Invalid keys are filtered out in
					 * sethkey()
					 */
					din_size = (key_len == 24) ?
						AES_MAX_KEY_SIZE : key_len;

					set_din_type(&desc[*seq_size], DMA_DLLI,
						     key_dma_addr, din_size,
						     NS_BIT);
				}
				set_key_size_aes(&desc[*seq_size], key_len);
			} else {
				/*des*/
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, key_len, NS_BIT);
				set_key_size_des(&desc[*seq_size], key_len);
			}
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, void *areq,
			       struct cc_hw_desc desc[], unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;
	}
}
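
/* Queue the data-processing descriptor: DIN through the cipher engine to
 * DOUT, either as a single DLLI entry or via the MLLI tables in SRAM.
 */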
static void cc_setup_flow_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, struct cc_hw_desc desc[],
			       unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
			  ctx_p->cipher_mode == DRV_CIPHER_ECB);

	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				ctx_p->drvdata->mlli_sram_addr,
				ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				ctx_p->drvdata->mlli_sram_addr,
				ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		}
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

	if (err != -EINPROGRESS) {
		/* Not a BACKLOG notification */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
		memcpy(req->iv, req_ctx->iv, ivsize);
		kfree_sensitive(req_ctx->iv);
	}

	skcipher_request_complete(req, err);
}
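
/* Common encrypt/decrypt path: validate the request, divert to the ESSIV
 * software fallback if needed, map buffers for DMA, build the descriptor
 * sequence and hand it to the request manager.
 */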
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		 "Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_data_size(ctx_p, nbytes)) {
		dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	if (ctx_p->fallback_on) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm);
		if (direction == DRV_CRYPTO_DIRECTION_ENCRYPT)
			return crypto_skcipher_encrypt(subreq);
		else
			return crypto_skcipher_decrypt(subreq);
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = cc_cipher_complete;
	cc_req.user_arg = req;

	/* Setup CPP operation details */
	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
		cc_req.cpp.is_cpp = true;
		cc_req.cpp.alg = ctx_p->cpp.alg;
		cc_req.cpp.slot = ctx_p->cpp.slot;
	}

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup state (IV) */
	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Setup MLLI line, if needed */
	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
	/* Setup key */
	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
	/* Setup state (IV and XEX key) */
	cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
	/* Read next IV */
	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kfree_sensitive(req_ctx->iv);
	}

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

/* Block cipher alg */
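/* Entries are filtered at registration time by minimum HW revision, enabled
 * standards body (NIST/OSCCA) and whether security functions are disabled.
 */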
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv(cbc(paes),sha256)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv512(cbc(paes),sha256)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv4096(cbc(paes),sha256)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		/* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
		 * for the reason why this differs from the generic
		 * implementation.
		 */
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv(cbc(aes),sha256)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv512(cbc(aes),sha256)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv4096(cbc(aes),sha256)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(sm4)",
		.driver_name = "cbc-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ecb(sm4)",
		.driver_name = "ecb-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ctr(sm4)",
		.driver_name = "ctr-sm4-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "cbc(psm4)",
		.driver_name = "cbc-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
	{
		.name = "ctr(psm4)",
		.driver_name = "ctr-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
};

static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;

	/* Remove registered algs */
	list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
		crypto_unregister_skcipher(&t_alg->skcipher_alg);
		list_del(&t_alg->entry);
	}
	return 0;
}
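
/* Register every skcipher template supported by this HW revision and
 * configuration with the kernel crypto API.
 */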
int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	INIT_LIST_HEAD(&drvdata->alg_list);

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			goto fail0;
		}

		list_add_tail(&t_alg->entry, &drvdata->alg_list);
		dev_dbg(dev, "Registered %s\n",
			t_alg->skcipher_alg.base.cra_driver_name);
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}