1 // SPDX-License-Identifier: GPL-2.0 2 /* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */ 3 4 #include <linux/kernel.h> 5 #include <linux/module.h> 6 #include <crypto/algapi.h> 7 #include <crypto/internal/skcipher.h> 8 #include <crypto/internal/des.h> 9 #include <crypto/xts.h> 10 #include <crypto/sm4.h> 11 #include <crypto/scatterwalk.h> 12 13 #include "cc_driver.h" 14 #include "cc_lli_defs.h" 15 #include "cc_buffer_mgr.h" 16 #include "cc_cipher.h" 17 #include "cc_request_mgr.h" 18 19 #define MAX_SKCIPHER_SEQ_LEN 6 20 21 #define template_skcipher template_u.skcipher 22 23 struct cc_cipher_handle { 24 struct list_head alg_list; 25 }; 26 27 struct cc_user_key_info { 28 u8 *key; 29 dma_addr_t key_dma_addr; 30 }; 31 32 struct cc_hw_key_info { 33 enum cc_hw_crypto_key key1_slot; 34 enum cc_hw_crypto_key key2_slot; 35 }; 36 37 struct cc_cpp_key_info { 38 u8 slot; 39 enum cc_cpp_alg alg; 40 }; 41 42 enum cc_key_type { 43 CC_UNPROTECTED_KEY, /* User key */ 44 CC_HW_PROTECTED_KEY, /* HW (FDE) key */ 45 CC_POLICY_PROTECTED_KEY, /* CPP key */ 46 CC_INVALID_PROTECTED_KEY /* Invalid key */ 47 }; 48 49 struct cc_cipher_ctx { 50 struct cc_drvdata *drvdata; 51 int keylen; 52 int key_round_number; 53 int cipher_mode; 54 int flow_mode; 55 unsigned int flags; 56 enum cc_key_type key_type; 57 struct cc_user_key_info user; 58 union { 59 struct cc_hw_key_info hw; 60 struct cc_cpp_key_info cpp; 61 }; 62 struct crypto_shash *shash_tfm; 63 }; 64 65 static void cc_cipher_complete(struct device *dev, void *cc_req, int err); 66 67 static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm) 68 { 69 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 70 71 return ctx_p->key_type; 72 } 73 74 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size) 75 { 76 switch (ctx_p->flow_mode) { 77 case S_DIN_to_AES: 78 switch (size) { 79 case CC_AES_128_BIT_KEY_SIZE: 80 case CC_AES_192_BIT_KEY_SIZE: 81 if (ctx_p->cipher_mode != DRV_CIPHER_XTS && 82 ctx_p->cipher_mode != DRV_CIPHER_ESSIV && 83 ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER) 84 return 0; 85 break; 86 case CC_AES_256_BIT_KEY_SIZE: 87 return 0; 88 case (CC_AES_192_BIT_KEY_SIZE * 2): 89 case (CC_AES_256_BIT_KEY_SIZE * 2): 90 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || 91 ctx_p->cipher_mode == DRV_CIPHER_ESSIV || 92 ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) 93 return 0; 94 break; 95 default: 96 break; 97 } 98 break; 99 case S_DIN_to_DES: 100 if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE) 101 return 0; 102 break; 103 case S_DIN_to_SM4: 104 if (size == SM4_KEY_SIZE) 105 return 0; 106 default: 107 break; 108 } 109 return -EINVAL; 110 } 111 112 static int validate_data_size(struct cc_cipher_ctx *ctx_p, 113 unsigned int size) 114 { 115 switch (ctx_p->flow_mode) { 116 case S_DIN_to_AES: 117 switch (ctx_p->cipher_mode) { 118 case DRV_CIPHER_XTS: 119 case DRV_CIPHER_CBC_CTS: 120 if (size >= AES_BLOCK_SIZE) 121 return 0; 122 break; 123 case DRV_CIPHER_OFB: 124 case DRV_CIPHER_CTR: 125 return 0; 126 case DRV_CIPHER_ECB: 127 case DRV_CIPHER_CBC: 128 case DRV_CIPHER_ESSIV: 129 case DRV_CIPHER_BITLOCKER: 130 if (IS_ALIGNED(size, AES_BLOCK_SIZE)) 131 return 0; 132 break; 133 default: 134 break; 135 } 136 break; 137 case S_DIN_to_DES: 138 if (IS_ALIGNED(size, DES_BLOCK_SIZE)) 139 return 0; 140 break; 141 case S_DIN_to_SM4: 142 switch (ctx_p->cipher_mode) { 143 case DRV_CIPHER_CTR: 144 return 0; 145 case DRV_CIPHER_ECB: 146 case DRV_CIPHER_CBC: 147 if (IS_ALIGNED(size, SM4_BLOCK_SIZE)) 148 return 0; 149 default: 150 break; 151 } 152 default: 153 break; 154 } 155 return -EINVAL; 156 } 157 158 static int cc_cipher_init(struct crypto_tfm *tfm) 159 { 160 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 161 struct cc_crypto_alg *cc_alg = 162 container_of(tfm->__crt_alg, struct cc_crypto_alg, 163 skcipher_alg.base); 164 struct device *dev = drvdata_to_dev(cc_alg->drvdata); 165 unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize; 166 int rc = 0; 167 168 dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p, 169 crypto_tfm_alg_name(tfm)); 170 171 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm), 172 sizeof(struct cipher_req_ctx)); 173 174 ctx_p->cipher_mode = cc_alg->cipher_mode; 175 ctx_p->flow_mode = cc_alg->flow_mode; 176 ctx_p->drvdata = cc_alg->drvdata; 177 178 /* Allocate key buffer, cache line aligned */ 179 ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL); 180 if (!ctx_p->user.key) 181 return -ENOMEM; 182 183 dev_dbg(dev, "Allocated key buffer in context. key=@%p\n", 184 ctx_p->user.key); 185 186 /* Map key buffer */ 187 ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key, 188 max_key_buf_size, 189 DMA_TO_DEVICE); 190 if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) { 191 dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n", 192 max_key_buf_size, ctx_p->user.key); 193 return -ENOMEM; 194 } 195 dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n", 196 max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr); 197 198 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { 199 /* Alloc hash tfm for essiv */ 200 ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0); 201 if (IS_ERR(ctx_p->shash_tfm)) { 202 dev_err(dev, "Error allocating hash tfm for ESSIV.\n"); 203 return PTR_ERR(ctx_p->shash_tfm); 204 } 205 } 206 207 return rc; 208 } 209 210 static void cc_cipher_exit(struct crypto_tfm *tfm) 211 { 212 struct crypto_alg *alg = tfm->__crt_alg; 213 struct cc_crypto_alg *cc_alg = 214 container_of(alg, struct cc_crypto_alg, 215 skcipher_alg.base); 216 unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize; 217 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 218 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 219 220 dev_dbg(dev, "Clearing context @%p for %s\n", 221 crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm)); 222 223 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { 224 /* Free hash tfm for essiv */ 225 crypto_free_shash(ctx_p->shash_tfm); 226 ctx_p->shash_tfm = NULL; 227 } 228 229 /* Unmap key buffer */ 230 dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size, 231 DMA_TO_DEVICE); 232 dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n", 233 &ctx_p->user.key_dma_addr); 234 235 /* Free key buffer in context */ 236 kzfree(ctx_p->user.key); 237 dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key); 238 } 239 240 struct tdes_keys { 241 u8 key1[DES_KEY_SIZE]; 242 u8 key2[DES_KEY_SIZE]; 243 u8 key3[DES_KEY_SIZE]; 244 }; 245 246 static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num) 247 { 248 switch (slot_num) { 249 case 0: 250 return KFDE0_KEY; 251 case 1: 252 return KFDE1_KEY; 253 case 2: 254 return KFDE2_KEY; 255 case 3: 256 return KFDE3_KEY; 257 } 258 return END_OF_KEYS; 259 } 260 261 static u8 cc_slot_to_cpp_key(u8 slot_num) 262 { 263 return (slot_num - CC_FIRST_CPP_KEY_SLOT); 264 } 265 266 static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num) 267 { 268 if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT) 269 return CC_HW_PROTECTED_KEY; 270 else if (slot_num >= CC_FIRST_CPP_KEY_SLOT && 271 slot_num <= CC_LAST_CPP_KEY_SLOT) 272 return CC_POLICY_PROTECTED_KEY; 273 else 274 return CC_INVALID_PROTECTED_KEY; 275 } 276 277 static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key, 278 unsigned int keylen) 279 { 280 struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm); 281 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 282 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 283 struct cc_hkey_info hki; 284 285 dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n", 286 ctx_p, crypto_tfm_alg_name(tfm), keylen); 287 dump_byte_array("key", (u8 *)key, keylen); 288 289 /* STAT_PHASE_0: Init and sanity checks */ 290 291 /* This check the size of the protected key token */ 292 if (keylen != sizeof(hki)) { 293 dev_err(dev, "Unsupported protected key size %d.\n", keylen); 294 return -EINVAL; 295 } 296 297 memcpy(&hki, key, keylen); 298 299 /* The real key len for crypto op is the size of the HW key 300 * referenced by the HW key slot, not the hardware key token 301 */ 302 keylen = hki.keylen; 303 304 if (validate_keys_sizes(ctx_p, keylen)) { 305 dev_dbg(dev, "Unsupported key size %d.\n", keylen); 306 return -EINVAL; 307 } 308 309 ctx_p->keylen = keylen; 310 311 switch (cc_slot_to_key_type(hki.hw_key1)) { 312 case CC_HW_PROTECTED_KEY: 313 if (ctx_p->flow_mode == S_DIN_to_SM4) { 314 dev_err(dev, "Only AES HW protected keys are supported\n"); 315 return -EINVAL; 316 } 317 318 ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1); 319 if (ctx_p->hw.key1_slot == END_OF_KEYS) { 320 dev_err(dev, "Unsupported hw key1 number (%d)\n", 321 hki.hw_key1); 322 return -EINVAL; 323 } 324 325 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || 326 ctx_p->cipher_mode == DRV_CIPHER_ESSIV || 327 ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) { 328 if (hki.hw_key1 == hki.hw_key2) { 329 dev_err(dev, "Illegal hw key numbers (%d,%d)\n", 330 hki.hw_key1, hki.hw_key2); 331 return -EINVAL; 332 } 333 334 ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2); 335 if (ctx_p->hw.key2_slot == END_OF_KEYS) { 336 dev_err(dev, "Unsupported hw key2 number (%d)\n", 337 hki.hw_key2); 338 return -EINVAL; 339 } 340 } 341 342 ctx_p->key_type = CC_HW_PROTECTED_KEY; 343 dev_dbg(dev, "HW protected key %d/%d set\n.", 344 ctx_p->hw.key1_slot, ctx_p->hw.key2_slot); 345 break; 346 347 case CC_POLICY_PROTECTED_KEY: 348 if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) { 349 dev_err(dev, "CPP keys not supported in this hardware revision.\n"); 350 return -EINVAL; 351 } 352 353 if (ctx_p->cipher_mode != DRV_CIPHER_CBC && 354 ctx_p->cipher_mode != DRV_CIPHER_CTR) { 355 dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n"); 356 return -EINVAL; 357 } 358 359 ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1); 360 if (ctx_p->flow_mode == S_DIN_to_AES) 361 ctx_p->cpp.alg = CC_CPP_AES; 362 else /* Must be SM4 since due to sethkey registration */ 363 ctx_p->cpp.alg = CC_CPP_SM4; 364 ctx_p->key_type = CC_POLICY_PROTECTED_KEY; 365 dev_dbg(dev, "policy protected key alg: %d slot: %d.\n", 366 ctx_p->cpp.alg, ctx_p->cpp.slot); 367 break; 368 369 default: 370 dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1); 371 return -EINVAL; 372 } 373 374 return 0; 375 } 376 377 static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key, 378 unsigned int keylen) 379 { 380 struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm); 381 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 382 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 383 struct cc_crypto_alg *cc_alg = 384 container_of(tfm->__crt_alg, struct cc_crypto_alg, 385 skcipher_alg.base); 386 unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize; 387 388 dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n", 389 ctx_p, crypto_tfm_alg_name(tfm), keylen); 390 dump_byte_array("key", (u8 *)key, keylen); 391 392 /* STAT_PHASE_0: Init and sanity checks */ 393 394 if (validate_keys_sizes(ctx_p, keylen)) { 395 dev_dbg(dev, "Unsupported key size %d.\n", keylen); 396 return -EINVAL; 397 } 398 399 ctx_p->key_type = CC_UNPROTECTED_KEY; 400 401 /* 402 * Verify DES weak keys 403 * Note that we're dropping the expanded key since the 404 * HW does the expansion on its own. 405 */ 406 if (ctx_p->flow_mode == S_DIN_to_DES) { 407 if ((keylen == DES3_EDE_KEY_SIZE && 408 verify_skcipher_des3_key(sktfm, key)) || 409 verify_skcipher_des_key(sktfm, key)) { 410 dev_dbg(dev, "weak DES key"); 411 return -EINVAL; 412 } 413 } 414 415 if (ctx_p->cipher_mode == DRV_CIPHER_XTS && 416 xts_check_key(tfm, key, keylen)) { 417 dev_dbg(dev, "weak XTS key"); 418 return -EINVAL; 419 } 420 421 /* STAT_PHASE_1: Copy key to ctx */ 422 dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr, 423 max_key_buf_size, DMA_TO_DEVICE); 424 425 memcpy(ctx_p->user.key, key, keylen); 426 if (keylen == 24) 427 memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24); 428 429 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { 430 /* sha256 for key2 - use sw implementation */ 431 int key_len = keylen >> 1; 432 int err; 433 434 SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm); 435 436 desc->tfm = ctx_p->shash_tfm; 437 438 err = crypto_shash_digest(desc, ctx_p->user.key, key_len, 439 ctx_p->user.key + key_len); 440 if (err) { 441 dev_err(dev, "Failed to hash ESSIV key.\n"); 442 return err; 443 } 444 } 445 dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr, 446 max_key_buf_size, DMA_TO_DEVICE); 447 ctx_p->keylen = keylen; 448 449 dev_dbg(dev, "return safely"); 450 return 0; 451 } 452 453 static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p) 454 { 455 switch (ctx_p->flow_mode) { 456 case S_DIN_to_AES: 457 return S_AES_to_DOUT; 458 case S_DIN_to_DES: 459 return S_DES_to_DOUT; 460 case S_DIN_to_SM4: 461 return S_SM4_to_DOUT; 462 default: 463 return ctx_p->flow_mode; 464 } 465 } 466 467 static void cc_setup_readiv_desc(struct crypto_tfm *tfm, 468 struct cipher_req_ctx *req_ctx, 469 unsigned int ivsize, struct cc_hw_desc desc[], 470 unsigned int *seq_size) 471 { 472 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 473 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 474 int cipher_mode = ctx_p->cipher_mode; 475 int flow_mode = cc_out_setup_mode(ctx_p); 476 int direction = req_ctx->gen_ctx.op_type; 477 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; 478 479 if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) 480 return; 481 482 switch (cipher_mode) { 483 case DRV_CIPHER_ECB: 484 break; 485 case DRV_CIPHER_CBC: 486 case DRV_CIPHER_CBC_CTS: 487 case DRV_CIPHER_CTR: 488 case DRV_CIPHER_OFB: 489 /* Read next IV */ 490 hw_desc_init(&desc[*seq_size]); 491 set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1); 492 set_cipher_config0(&desc[*seq_size], direction); 493 set_flow_mode(&desc[*seq_size], flow_mode); 494 set_cipher_mode(&desc[*seq_size], cipher_mode); 495 if (cipher_mode == DRV_CIPHER_CTR || 496 cipher_mode == DRV_CIPHER_OFB) { 497 set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1); 498 } else { 499 set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0); 500 } 501 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); 502 (*seq_size)++; 503 break; 504 case DRV_CIPHER_XTS: 505 case DRV_CIPHER_ESSIV: 506 case DRV_CIPHER_BITLOCKER: 507 /* IV */ 508 hw_desc_init(&desc[*seq_size]); 509 set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1); 510 set_cipher_mode(&desc[*seq_size], cipher_mode); 511 set_cipher_config0(&desc[*seq_size], direction); 512 set_flow_mode(&desc[*seq_size], flow_mode); 513 set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE, 514 NS_BIT, 1); 515 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); 516 (*seq_size)++; 517 break; 518 default: 519 dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode); 520 } 521 } 522 523 524 static void cc_setup_state_desc(struct crypto_tfm *tfm, 525 struct cipher_req_ctx *req_ctx, 526 unsigned int ivsize, unsigned int nbytes, 527 struct cc_hw_desc desc[], 528 unsigned int *seq_size) 529 { 530 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 531 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 532 int cipher_mode = ctx_p->cipher_mode; 533 int flow_mode = ctx_p->flow_mode; 534 int direction = req_ctx->gen_ctx.op_type; 535 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; 536 unsigned int du_size = nbytes; 537 538 struct cc_crypto_alg *cc_alg = 539 container_of(tfm->__crt_alg, struct cc_crypto_alg, 540 skcipher_alg.base); 541 542 if (cc_alg->data_unit) 543 du_size = cc_alg->data_unit; 544 545 switch (cipher_mode) { 546 case DRV_CIPHER_ECB: 547 break; 548 case DRV_CIPHER_CBC: 549 case DRV_CIPHER_CBC_CTS: 550 case DRV_CIPHER_CTR: 551 case DRV_CIPHER_OFB: 552 /* Load IV */ 553 hw_desc_init(&desc[*seq_size]); 554 set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize, 555 NS_BIT); 556 set_cipher_config0(&desc[*seq_size], direction); 557 set_flow_mode(&desc[*seq_size], flow_mode); 558 set_cipher_mode(&desc[*seq_size], cipher_mode); 559 if (cipher_mode == DRV_CIPHER_CTR || 560 cipher_mode == DRV_CIPHER_OFB) { 561 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1); 562 } else { 563 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0); 564 } 565 (*seq_size)++; 566 break; 567 case DRV_CIPHER_XTS: 568 case DRV_CIPHER_ESSIV: 569 case DRV_CIPHER_BITLOCKER: 570 break; 571 default: 572 dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode); 573 } 574 } 575 576 577 static void cc_setup_xex_state_desc(struct crypto_tfm *tfm, 578 struct cipher_req_ctx *req_ctx, 579 unsigned int ivsize, unsigned int nbytes, 580 struct cc_hw_desc desc[], 581 unsigned int *seq_size) 582 { 583 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 584 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 585 int cipher_mode = ctx_p->cipher_mode; 586 int flow_mode = ctx_p->flow_mode; 587 int direction = req_ctx->gen_ctx.op_type; 588 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; 589 unsigned int key_len = ctx_p->keylen; 590 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; 591 unsigned int du_size = nbytes; 592 593 struct cc_crypto_alg *cc_alg = 594 container_of(tfm->__crt_alg, struct cc_crypto_alg, 595 skcipher_alg.base); 596 597 if (cc_alg->data_unit) 598 du_size = cc_alg->data_unit; 599 600 switch (cipher_mode) { 601 case DRV_CIPHER_ECB: 602 break; 603 case DRV_CIPHER_CBC: 604 case DRV_CIPHER_CBC_CTS: 605 case DRV_CIPHER_CTR: 606 case DRV_CIPHER_OFB: 607 break; 608 case DRV_CIPHER_XTS: 609 case DRV_CIPHER_ESSIV: 610 case DRV_CIPHER_BITLOCKER: 611 /* load XEX key */ 612 hw_desc_init(&desc[*seq_size]); 613 set_cipher_mode(&desc[*seq_size], cipher_mode); 614 set_cipher_config0(&desc[*seq_size], direction); 615 if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) { 616 set_hw_crypto_key(&desc[*seq_size], 617 ctx_p->hw.key2_slot); 618 } else { 619 set_din_type(&desc[*seq_size], DMA_DLLI, 620 (key_dma_addr + (key_len / 2)), 621 (key_len / 2), NS_BIT); 622 } 623 set_xex_data_unit_size(&desc[*seq_size], du_size); 624 set_flow_mode(&desc[*seq_size], S_DIN_to_AES2); 625 set_key_size_aes(&desc[*seq_size], (key_len / 2)); 626 set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY); 627 (*seq_size)++; 628 629 /* Load IV */ 630 hw_desc_init(&desc[*seq_size]); 631 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1); 632 set_cipher_mode(&desc[*seq_size], cipher_mode); 633 set_cipher_config0(&desc[*seq_size], direction); 634 set_key_size_aes(&desc[*seq_size], (key_len / 2)); 635 set_flow_mode(&desc[*seq_size], flow_mode); 636 set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, 637 CC_AES_BLOCK_SIZE, NS_BIT); 638 (*seq_size)++; 639 break; 640 default: 641 dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode); 642 } 643 } 644 645 static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p) 646 { 647 switch (ctx_p->flow_mode) { 648 case S_DIN_to_AES: 649 return DIN_AES_DOUT; 650 case S_DIN_to_DES: 651 return DIN_DES_DOUT; 652 case S_DIN_to_SM4: 653 return DIN_SM4_DOUT; 654 default: 655 return ctx_p->flow_mode; 656 } 657 } 658 659 static void cc_setup_key_desc(struct crypto_tfm *tfm, 660 struct cipher_req_ctx *req_ctx, 661 unsigned int nbytes, struct cc_hw_desc desc[], 662 unsigned int *seq_size) 663 { 664 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 665 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 666 int cipher_mode = ctx_p->cipher_mode; 667 int flow_mode = ctx_p->flow_mode; 668 int direction = req_ctx->gen_ctx.op_type; 669 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; 670 unsigned int key_len = ctx_p->keylen; 671 unsigned int din_size; 672 673 switch (cipher_mode) { 674 case DRV_CIPHER_CBC: 675 case DRV_CIPHER_CBC_CTS: 676 case DRV_CIPHER_CTR: 677 case DRV_CIPHER_OFB: 678 case DRV_CIPHER_ECB: 679 /* Load key */ 680 hw_desc_init(&desc[*seq_size]); 681 set_cipher_mode(&desc[*seq_size], cipher_mode); 682 set_cipher_config0(&desc[*seq_size], direction); 683 684 if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) { 685 /* We use the AES key size coding for all CPP algs */ 686 set_key_size_aes(&desc[*seq_size], key_len); 687 set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot); 688 flow_mode = cc_out_flow_mode(ctx_p); 689 } else { 690 if (flow_mode == S_DIN_to_AES) { 691 if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) { 692 set_hw_crypto_key(&desc[*seq_size], 693 ctx_p->hw.key1_slot); 694 } else { 695 /* CC_POLICY_UNPROTECTED_KEY 696 * Invalid keys are filtered out in 697 * sethkey() 698 */ 699 din_size = (key_len == 24) ? 700 AES_MAX_KEY_SIZE : key_len; 701 702 set_din_type(&desc[*seq_size], DMA_DLLI, 703 key_dma_addr, din_size, 704 NS_BIT); 705 } 706 set_key_size_aes(&desc[*seq_size], key_len); 707 } else { 708 /*des*/ 709 set_din_type(&desc[*seq_size], DMA_DLLI, 710 key_dma_addr, key_len, NS_BIT); 711 set_key_size_des(&desc[*seq_size], key_len); 712 } 713 set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0); 714 } 715 set_flow_mode(&desc[*seq_size], flow_mode); 716 (*seq_size)++; 717 break; 718 case DRV_CIPHER_XTS: 719 case DRV_CIPHER_ESSIV: 720 case DRV_CIPHER_BITLOCKER: 721 /* Load AES key */ 722 hw_desc_init(&desc[*seq_size]); 723 set_cipher_mode(&desc[*seq_size], cipher_mode); 724 set_cipher_config0(&desc[*seq_size], direction); 725 if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) { 726 set_hw_crypto_key(&desc[*seq_size], 727 ctx_p->hw.key1_slot); 728 } else { 729 set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr, 730 (key_len / 2), NS_BIT); 731 } 732 set_key_size_aes(&desc[*seq_size], (key_len / 2)); 733 set_flow_mode(&desc[*seq_size], flow_mode); 734 set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0); 735 (*seq_size)++; 736 break; 737 default: 738 dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode); 739 } 740 } 741 742 static void cc_setup_mlli_desc(struct crypto_tfm *tfm, 743 struct cipher_req_ctx *req_ctx, 744 struct scatterlist *dst, struct scatterlist *src, 745 unsigned int nbytes, void *areq, 746 struct cc_hw_desc desc[], unsigned int *seq_size) 747 { 748 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 749 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 750 751 if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) { 752 /* bypass */ 753 dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n", 754 &req_ctx->mlli_params.mlli_dma_addr, 755 req_ctx->mlli_params.mlli_len, 756 (unsigned int)ctx_p->drvdata->mlli_sram_addr); 757 hw_desc_init(&desc[*seq_size]); 758 set_din_type(&desc[*seq_size], DMA_DLLI, 759 req_ctx->mlli_params.mlli_dma_addr, 760 req_ctx->mlli_params.mlli_len, NS_BIT); 761 set_dout_sram(&desc[*seq_size], 762 ctx_p->drvdata->mlli_sram_addr, 763 req_ctx->mlli_params.mlli_len); 764 set_flow_mode(&desc[*seq_size], BYPASS); 765 (*seq_size)++; 766 } 767 } 768 769 static void cc_setup_flow_desc(struct crypto_tfm *tfm, 770 struct cipher_req_ctx *req_ctx, 771 struct scatterlist *dst, struct scatterlist *src, 772 unsigned int nbytes, struct cc_hw_desc desc[], 773 unsigned int *seq_size) 774 { 775 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 776 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 777 unsigned int flow_mode = cc_out_flow_mode(ctx_p); 778 bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY || 779 ctx_p->cipher_mode == DRV_CIPHER_ECB); 780 781 /* Process */ 782 if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) { 783 dev_dbg(dev, " data params addr %pad length 0x%X\n", 784 &sg_dma_address(src), nbytes); 785 dev_dbg(dev, " data params addr %pad length 0x%X\n", 786 &sg_dma_address(dst), nbytes); 787 hw_desc_init(&desc[*seq_size]); 788 set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src), 789 nbytes, NS_BIT); 790 set_dout_dlli(&desc[*seq_size], sg_dma_address(dst), 791 nbytes, NS_BIT, (!last_desc ? 0 : 1)); 792 if (last_desc) 793 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); 794 795 set_flow_mode(&desc[*seq_size], flow_mode); 796 (*seq_size)++; 797 } else { 798 hw_desc_init(&desc[*seq_size]); 799 set_din_type(&desc[*seq_size], DMA_MLLI, 800 ctx_p->drvdata->mlli_sram_addr, 801 req_ctx->in_mlli_nents, NS_BIT); 802 if (req_ctx->out_nents == 0) { 803 dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n", 804 (unsigned int)ctx_p->drvdata->mlli_sram_addr, 805 (unsigned int)ctx_p->drvdata->mlli_sram_addr); 806 set_dout_mlli(&desc[*seq_size], 807 ctx_p->drvdata->mlli_sram_addr, 808 req_ctx->in_mlli_nents, NS_BIT, 809 (!last_desc ? 0 : 1)); 810 } else { 811 dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n", 812 (unsigned int)ctx_p->drvdata->mlli_sram_addr, 813 (unsigned int)ctx_p->drvdata->mlli_sram_addr + 814 (u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents); 815 set_dout_mlli(&desc[*seq_size], 816 (ctx_p->drvdata->mlli_sram_addr + 817 (LLI_ENTRY_BYTE_SIZE * 818 req_ctx->in_mlli_nents)), 819 req_ctx->out_mlli_nents, NS_BIT, 820 (!last_desc ? 0 : 1)); 821 } 822 if (last_desc) 823 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); 824 825 set_flow_mode(&desc[*seq_size], flow_mode); 826 (*seq_size)++; 827 } 828 } 829 830 static void cc_cipher_complete(struct device *dev, void *cc_req, int err) 831 { 832 struct skcipher_request *req = (struct skcipher_request *)cc_req; 833 struct scatterlist *dst = req->dst; 834 struct scatterlist *src = req->src; 835 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); 836 struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req); 837 unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm); 838 839 if (err != -EINPROGRESS) { 840 /* Not a BACKLOG notification */ 841 cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); 842 memcpy(req->iv, req_ctx->iv, ivsize); 843 kzfree(req_ctx->iv); 844 } 845 846 skcipher_request_complete(req, err); 847 } 848 849 static int cc_cipher_process(struct skcipher_request *req, 850 enum drv_crypto_direction direction) 851 { 852 struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req); 853 struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm); 854 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); 855 unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm); 856 struct scatterlist *dst = req->dst; 857 struct scatterlist *src = req->src; 858 unsigned int nbytes = req->cryptlen; 859 void *iv = req->iv; 860 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 861 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 862 struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN]; 863 struct cc_crypto_req cc_req = {}; 864 int rc; 865 unsigned int seq_len = 0; 866 gfp_t flags = cc_gfp_flags(&req->base); 867 868 dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n", 869 ((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ? 870 "Encrypt" : "Decrypt"), req, iv, nbytes); 871 872 /* STAT_PHASE_0: Init and sanity checks */ 873 874 /* TODO: check data length according to mode */ 875 if (validate_data_size(ctx_p, nbytes)) { 876 dev_dbg(dev, "Unsupported data size %d.\n", nbytes); 877 rc = -EINVAL; 878 goto exit_process; 879 } 880 if (nbytes == 0) { 881 /* No data to process is valid */ 882 rc = 0; 883 goto exit_process; 884 } 885 886 /* The IV we are handed may be allocted from the stack so 887 * we must copy it to a DMAable buffer before use. 888 */ 889 req_ctx->iv = kmemdup(iv, ivsize, flags); 890 if (!req_ctx->iv) { 891 rc = -ENOMEM; 892 goto exit_process; 893 } 894 895 /* Setup request structure */ 896 cc_req.user_cb = (void *)cc_cipher_complete; 897 cc_req.user_arg = (void *)req; 898 899 /* Setup CPP operation details */ 900 if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) { 901 cc_req.cpp.is_cpp = true; 902 cc_req.cpp.alg = ctx_p->cpp.alg; 903 cc_req.cpp.slot = ctx_p->cpp.slot; 904 } 905 906 /* Setup request context */ 907 req_ctx->gen_ctx.op_type = direction; 908 909 /* STAT_PHASE_1: Map buffers */ 910 911 rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, 912 req_ctx->iv, src, dst, flags); 913 if (rc) { 914 dev_err(dev, "map_request() failed\n"); 915 goto exit_process; 916 } 917 918 /* STAT_PHASE_2: Create sequence */ 919 920 /* Setup state (IV) */ 921 cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len); 922 /* Setup MLLI line, if needed */ 923 cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len); 924 /* Setup key */ 925 cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len); 926 /* Setup state (IV and XEX key) */ 927 cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len); 928 /* Data processing */ 929 cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len); 930 /* Read next IV */ 931 cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len); 932 933 /* STAT_PHASE_3: Lock HW and push sequence */ 934 935 rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len, 936 &req->base); 937 if (rc != -EINPROGRESS && rc != -EBUSY) { 938 /* Failed to send the request or request completed 939 * synchronously 940 */ 941 cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); 942 } 943 944 exit_process: 945 if (rc != -EINPROGRESS && rc != -EBUSY) { 946 kzfree(req_ctx->iv); 947 } 948 949 return rc; 950 } 951 952 static int cc_cipher_encrypt(struct skcipher_request *req) 953 { 954 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); 955 956 memset(req_ctx, 0, sizeof(*req_ctx)); 957 958 return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT); 959 } 960 961 static int cc_cipher_decrypt(struct skcipher_request *req) 962 { 963 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); 964 965 memset(req_ctx, 0, sizeof(*req_ctx)); 966 967 return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT); 968 } 969 970 /* Block cipher alg */ 971 static const struct cc_alg_template skcipher_algs[] = { 972 { 973 .name = "xts(paes)", 974 .driver_name = "xts-paes-ccree", 975 .blocksize = 1, 976 .template_skcipher = { 977 .setkey = cc_cipher_sethkey, 978 .encrypt = cc_cipher_encrypt, 979 .decrypt = cc_cipher_decrypt, 980 .min_keysize = CC_HW_KEY_SIZE, 981 .max_keysize = CC_HW_KEY_SIZE, 982 .ivsize = AES_BLOCK_SIZE, 983 }, 984 .cipher_mode = DRV_CIPHER_XTS, 985 .flow_mode = S_DIN_to_AES, 986 .min_hw_rev = CC_HW_REV_630, 987 .std_body = CC_STD_NIST, 988 .sec_func = true, 989 }, 990 { 991 .name = "xts512(paes)", 992 .driver_name = "xts-paes-du512-ccree", 993 .blocksize = 1, 994 .template_skcipher = { 995 .setkey = cc_cipher_sethkey, 996 .encrypt = cc_cipher_encrypt, 997 .decrypt = cc_cipher_decrypt, 998 .min_keysize = CC_HW_KEY_SIZE, 999 .max_keysize = CC_HW_KEY_SIZE, 1000 .ivsize = AES_BLOCK_SIZE, 1001 }, 1002 .cipher_mode = DRV_CIPHER_XTS, 1003 .flow_mode = S_DIN_to_AES, 1004 .data_unit = 512, 1005 .min_hw_rev = CC_HW_REV_712, 1006 .std_body = CC_STD_NIST, 1007 .sec_func = true, 1008 }, 1009 { 1010 .name = "xts4096(paes)", 1011 .driver_name = "xts-paes-du4096-ccree", 1012 .blocksize = 1, 1013 .template_skcipher = { 1014 .setkey = cc_cipher_sethkey, 1015 .encrypt = cc_cipher_encrypt, 1016 .decrypt = cc_cipher_decrypt, 1017 .min_keysize = CC_HW_KEY_SIZE, 1018 .max_keysize = CC_HW_KEY_SIZE, 1019 .ivsize = AES_BLOCK_SIZE, 1020 }, 1021 .cipher_mode = DRV_CIPHER_XTS, 1022 .flow_mode = S_DIN_to_AES, 1023 .data_unit = 4096, 1024 .min_hw_rev = CC_HW_REV_712, 1025 .std_body = CC_STD_NIST, 1026 .sec_func = true, 1027 }, 1028 { 1029 .name = "essiv(paes)", 1030 .driver_name = "essiv-paes-ccree", 1031 .blocksize = AES_BLOCK_SIZE, 1032 .template_skcipher = { 1033 .setkey = cc_cipher_sethkey, 1034 .encrypt = cc_cipher_encrypt, 1035 .decrypt = cc_cipher_decrypt, 1036 .min_keysize = CC_HW_KEY_SIZE, 1037 .max_keysize = CC_HW_KEY_SIZE, 1038 .ivsize = AES_BLOCK_SIZE, 1039 }, 1040 .cipher_mode = DRV_CIPHER_ESSIV, 1041 .flow_mode = S_DIN_to_AES, 1042 .min_hw_rev = CC_HW_REV_712, 1043 .std_body = CC_STD_NIST, 1044 .sec_func = true, 1045 }, 1046 { 1047 .name = "essiv512(paes)", 1048 .driver_name = "essiv-paes-du512-ccree", 1049 .blocksize = AES_BLOCK_SIZE, 1050 .template_skcipher = { 1051 .setkey = cc_cipher_sethkey, 1052 .encrypt = cc_cipher_encrypt, 1053 .decrypt = cc_cipher_decrypt, 1054 .min_keysize = CC_HW_KEY_SIZE, 1055 .max_keysize = CC_HW_KEY_SIZE, 1056 .ivsize = AES_BLOCK_SIZE, 1057 }, 1058 .cipher_mode = DRV_CIPHER_ESSIV, 1059 .flow_mode = S_DIN_to_AES, 1060 .data_unit = 512, 1061 .min_hw_rev = CC_HW_REV_712, 1062 .std_body = CC_STD_NIST, 1063 .sec_func = true, 1064 }, 1065 { 1066 .name = "essiv4096(paes)", 1067 .driver_name = "essiv-paes-du4096-ccree", 1068 .blocksize = AES_BLOCK_SIZE, 1069 .template_skcipher = { 1070 .setkey = cc_cipher_sethkey, 1071 .encrypt = cc_cipher_encrypt, 1072 .decrypt = cc_cipher_decrypt, 1073 .min_keysize = CC_HW_KEY_SIZE, 1074 .max_keysize = CC_HW_KEY_SIZE, 1075 .ivsize = AES_BLOCK_SIZE, 1076 }, 1077 .cipher_mode = DRV_CIPHER_ESSIV, 1078 .flow_mode = S_DIN_to_AES, 1079 .data_unit = 4096, 1080 .min_hw_rev = CC_HW_REV_712, 1081 .std_body = CC_STD_NIST, 1082 .sec_func = true, 1083 }, 1084 { 1085 .name = "bitlocker(paes)", 1086 .driver_name = "bitlocker-paes-ccree", 1087 .blocksize = AES_BLOCK_SIZE, 1088 .template_skcipher = { 1089 .setkey = cc_cipher_sethkey, 1090 .encrypt = cc_cipher_encrypt, 1091 .decrypt = cc_cipher_decrypt, 1092 .min_keysize = CC_HW_KEY_SIZE, 1093 .max_keysize = CC_HW_KEY_SIZE, 1094 .ivsize = AES_BLOCK_SIZE, 1095 }, 1096 .cipher_mode = DRV_CIPHER_BITLOCKER, 1097 .flow_mode = S_DIN_to_AES, 1098 .min_hw_rev = CC_HW_REV_712, 1099 .std_body = CC_STD_NIST, 1100 .sec_func = true, 1101 }, 1102 { 1103 .name = "bitlocker512(paes)", 1104 .driver_name = "bitlocker-paes-du512-ccree", 1105 .blocksize = AES_BLOCK_SIZE, 1106 .template_skcipher = { 1107 .setkey = cc_cipher_sethkey, 1108 .encrypt = cc_cipher_encrypt, 1109 .decrypt = cc_cipher_decrypt, 1110 .min_keysize = CC_HW_KEY_SIZE, 1111 .max_keysize = CC_HW_KEY_SIZE, 1112 .ivsize = AES_BLOCK_SIZE, 1113 }, 1114 .cipher_mode = DRV_CIPHER_BITLOCKER, 1115 .flow_mode = S_DIN_to_AES, 1116 .data_unit = 512, 1117 .min_hw_rev = CC_HW_REV_712, 1118 .std_body = CC_STD_NIST, 1119 .sec_func = true, 1120 }, 1121 { 1122 .name = "bitlocker4096(paes)", 1123 .driver_name = "bitlocker-paes-du4096-ccree", 1124 .blocksize = AES_BLOCK_SIZE, 1125 .template_skcipher = { 1126 .setkey = cc_cipher_sethkey, 1127 .encrypt = cc_cipher_encrypt, 1128 .decrypt = cc_cipher_decrypt, 1129 .min_keysize = CC_HW_KEY_SIZE, 1130 .max_keysize = CC_HW_KEY_SIZE, 1131 .ivsize = AES_BLOCK_SIZE, 1132 }, 1133 .cipher_mode = DRV_CIPHER_BITLOCKER, 1134 .flow_mode = S_DIN_to_AES, 1135 .data_unit = 4096, 1136 .min_hw_rev = CC_HW_REV_712, 1137 .std_body = CC_STD_NIST, 1138 .sec_func = true, 1139 }, 1140 { 1141 .name = "ecb(paes)", 1142 .driver_name = "ecb-paes-ccree", 1143 .blocksize = AES_BLOCK_SIZE, 1144 .template_skcipher = { 1145 .setkey = cc_cipher_sethkey, 1146 .encrypt = cc_cipher_encrypt, 1147 .decrypt = cc_cipher_decrypt, 1148 .min_keysize = CC_HW_KEY_SIZE, 1149 .max_keysize = CC_HW_KEY_SIZE, 1150 .ivsize = 0, 1151 }, 1152 .cipher_mode = DRV_CIPHER_ECB, 1153 .flow_mode = S_DIN_to_AES, 1154 .min_hw_rev = CC_HW_REV_712, 1155 .std_body = CC_STD_NIST, 1156 .sec_func = true, 1157 }, 1158 { 1159 .name = "cbc(paes)", 1160 .driver_name = "cbc-paes-ccree", 1161 .blocksize = AES_BLOCK_SIZE, 1162 .template_skcipher = { 1163 .setkey = cc_cipher_sethkey, 1164 .encrypt = cc_cipher_encrypt, 1165 .decrypt = cc_cipher_decrypt, 1166 .min_keysize = CC_HW_KEY_SIZE, 1167 .max_keysize = CC_HW_KEY_SIZE, 1168 .ivsize = AES_BLOCK_SIZE, 1169 }, 1170 .cipher_mode = DRV_CIPHER_CBC, 1171 .flow_mode = S_DIN_to_AES, 1172 .min_hw_rev = CC_HW_REV_712, 1173 .std_body = CC_STD_NIST, 1174 .sec_func = true, 1175 }, 1176 { 1177 .name = "ofb(paes)", 1178 .driver_name = "ofb-paes-ccree", 1179 .blocksize = AES_BLOCK_SIZE, 1180 .template_skcipher = { 1181 .setkey = cc_cipher_sethkey, 1182 .encrypt = cc_cipher_encrypt, 1183 .decrypt = cc_cipher_decrypt, 1184 .min_keysize = CC_HW_KEY_SIZE, 1185 .max_keysize = CC_HW_KEY_SIZE, 1186 .ivsize = AES_BLOCK_SIZE, 1187 }, 1188 .cipher_mode = DRV_CIPHER_OFB, 1189 .flow_mode = S_DIN_to_AES, 1190 .min_hw_rev = CC_HW_REV_712, 1191 .std_body = CC_STD_NIST, 1192 .sec_func = true, 1193 }, 1194 { 1195 .name = "cts(cbc(paes))", 1196 .driver_name = "cts-cbc-paes-ccree", 1197 .blocksize = AES_BLOCK_SIZE, 1198 .template_skcipher = { 1199 .setkey = cc_cipher_sethkey, 1200 .encrypt = cc_cipher_encrypt, 1201 .decrypt = cc_cipher_decrypt, 1202 .min_keysize = CC_HW_KEY_SIZE, 1203 .max_keysize = CC_HW_KEY_SIZE, 1204 .ivsize = AES_BLOCK_SIZE, 1205 }, 1206 .cipher_mode = DRV_CIPHER_CBC_CTS, 1207 .flow_mode = S_DIN_to_AES, 1208 .min_hw_rev = CC_HW_REV_712, 1209 .std_body = CC_STD_NIST, 1210 .sec_func = true, 1211 }, 1212 { 1213 .name = "ctr(paes)", 1214 .driver_name = "ctr-paes-ccree", 1215 .blocksize = 1, 1216 .template_skcipher = { 1217 .setkey = cc_cipher_sethkey, 1218 .encrypt = cc_cipher_encrypt, 1219 .decrypt = cc_cipher_decrypt, 1220 .min_keysize = CC_HW_KEY_SIZE, 1221 .max_keysize = CC_HW_KEY_SIZE, 1222 .ivsize = AES_BLOCK_SIZE, 1223 }, 1224 .cipher_mode = DRV_CIPHER_CTR, 1225 .flow_mode = S_DIN_to_AES, 1226 .min_hw_rev = CC_HW_REV_712, 1227 .std_body = CC_STD_NIST, 1228 .sec_func = true, 1229 }, 1230 { 1231 .name = "xts(aes)", 1232 .driver_name = "xts-aes-ccree", 1233 .blocksize = 1, 1234 .template_skcipher = { 1235 .setkey = cc_cipher_setkey, 1236 .encrypt = cc_cipher_encrypt, 1237 .decrypt = cc_cipher_decrypt, 1238 .min_keysize = AES_MIN_KEY_SIZE * 2, 1239 .max_keysize = AES_MAX_KEY_SIZE * 2, 1240 .ivsize = AES_BLOCK_SIZE, 1241 }, 1242 .cipher_mode = DRV_CIPHER_XTS, 1243 .flow_mode = S_DIN_to_AES, 1244 .min_hw_rev = CC_HW_REV_630, 1245 .std_body = CC_STD_NIST, 1246 }, 1247 { 1248 .name = "xts512(aes)", 1249 .driver_name = "xts-aes-du512-ccree", 1250 .blocksize = 1, 1251 .template_skcipher = { 1252 .setkey = cc_cipher_setkey, 1253 .encrypt = cc_cipher_encrypt, 1254 .decrypt = cc_cipher_decrypt, 1255 .min_keysize = AES_MIN_KEY_SIZE * 2, 1256 .max_keysize = AES_MAX_KEY_SIZE * 2, 1257 .ivsize = AES_BLOCK_SIZE, 1258 }, 1259 .cipher_mode = DRV_CIPHER_XTS, 1260 .flow_mode = S_DIN_to_AES, 1261 .data_unit = 512, 1262 .min_hw_rev = CC_HW_REV_712, 1263 .std_body = CC_STD_NIST, 1264 }, 1265 { 1266 .name = "xts4096(aes)", 1267 .driver_name = "xts-aes-du4096-ccree", 1268 .blocksize = 1, 1269 .template_skcipher = { 1270 .setkey = cc_cipher_setkey, 1271 .encrypt = cc_cipher_encrypt, 1272 .decrypt = cc_cipher_decrypt, 1273 .min_keysize = AES_MIN_KEY_SIZE * 2, 1274 .max_keysize = AES_MAX_KEY_SIZE * 2, 1275 .ivsize = AES_BLOCK_SIZE, 1276 }, 1277 .cipher_mode = DRV_CIPHER_XTS, 1278 .flow_mode = S_DIN_to_AES, 1279 .data_unit = 4096, 1280 .min_hw_rev = CC_HW_REV_712, 1281 .std_body = CC_STD_NIST, 1282 }, 1283 { 1284 .name = "essiv(aes)", 1285 .driver_name = "essiv-aes-ccree", 1286 .blocksize = AES_BLOCK_SIZE, 1287 .template_skcipher = { 1288 .setkey = cc_cipher_setkey, 1289 .encrypt = cc_cipher_encrypt, 1290 .decrypt = cc_cipher_decrypt, 1291 .min_keysize = AES_MIN_KEY_SIZE * 2, 1292 .max_keysize = AES_MAX_KEY_SIZE * 2, 1293 .ivsize = AES_BLOCK_SIZE, 1294 }, 1295 .cipher_mode = DRV_CIPHER_ESSIV, 1296 .flow_mode = S_DIN_to_AES, 1297 .min_hw_rev = CC_HW_REV_712, 1298 .std_body = CC_STD_NIST, 1299 }, 1300 { 1301 .name = "essiv512(aes)", 1302 .driver_name = "essiv-aes-du512-ccree", 1303 .blocksize = AES_BLOCK_SIZE, 1304 .template_skcipher = { 1305 .setkey = cc_cipher_setkey, 1306 .encrypt = cc_cipher_encrypt, 1307 .decrypt = cc_cipher_decrypt, 1308 .min_keysize = AES_MIN_KEY_SIZE * 2, 1309 .max_keysize = AES_MAX_KEY_SIZE * 2, 1310 .ivsize = AES_BLOCK_SIZE, 1311 }, 1312 .cipher_mode = DRV_CIPHER_ESSIV, 1313 .flow_mode = S_DIN_to_AES, 1314 .data_unit = 512, 1315 .min_hw_rev = CC_HW_REV_712, 1316 .std_body = CC_STD_NIST, 1317 }, 1318 { 1319 .name = "essiv4096(aes)", 1320 .driver_name = "essiv-aes-du4096-ccree", 1321 .blocksize = AES_BLOCK_SIZE, 1322 .template_skcipher = { 1323 .setkey = cc_cipher_setkey, 1324 .encrypt = cc_cipher_encrypt, 1325 .decrypt = cc_cipher_decrypt, 1326 .min_keysize = AES_MIN_KEY_SIZE * 2, 1327 .max_keysize = AES_MAX_KEY_SIZE * 2, 1328 .ivsize = AES_BLOCK_SIZE, 1329 }, 1330 .cipher_mode = DRV_CIPHER_ESSIV, 1331 .flow_mode = S_DIN_to_AES, 1332 .data_unit = 4096, 1333 .min_hw_rev = CC_HW_REV_712, 1334 .std_body = CC_STD_NIST, 1335 }, 1336 { 1337 .name = "bitlocker(aes)", 1338 .driver_name = "bitlocker-aes-ccree", 1339 .blocksize = AES_BLOCK_SIZE, 1340 .template_skcipher = { 1341 .setkey = cc_cipher_setkey, 1342 .encrypt = cc_cipher_encrypt, 1343 .decrypt = cc_cipher_decrypt, 1344 .min_keysize = AES_MIN_KEY_SIZE * 2, 1345 .max_keysize = AES_MAX_KEY_SIZE * 2, 1346 .ivsize = AES_BLOCK_SIZE, 1347 }, 1348 .cipher_mode = DRV_CIPHER_BITLOCKER, 1349 .flow_mode = S_DIN_to_AES, 1350 .min_hw_rev = CC_HW_REV_712, 1351 .std_body = CC_STD_NIST, 1352 }, 1353 { 1354 .name = "bitlocker512(aes)", 1355 .driver_name = "bitlocker-aes-du512-ccree", 1356 .blocksize = AES_BLOCK_SIZE, 1357 .template_skcipher = { 1358 .setkey = cc_cipher_setkey, 1359 .encrypt = cc_cipher_encrypt, 1360 .decrypt = cc_cipher_decrypt, 1361 .min_keysize = AES_MIN_KEY_SIZE * 2, 1362 .max_keysize = AES_MAX_KEY_SIZE * 2, 1363 .ivsize = AES_BLOCK_SIZE, 1364 }, 1365 .cipher_mode = DRV_CIPHER_BITLOCKER, 1366 .flow_mode = S_DIN_to_AES, 1367 .data_unit = 512, 1368 .min_hw_rev = CC_HW_REV_712, 1369 .std_body = CC_STD_NIST, 1370 }, 1371 { 1372 .name = "bitlocker4096(aes)", 1373 .driver_name = "bitlocker-aes-du4096-ccree", 1374 .blocksize = AES_BLOCK_SIZE, 1375 .template_skcipher = { 1376 .setkey = cc_cipher_setkey, 1377 .encrypt = cc_cipher_encrypt, 1378 .decrypt = cc_cipher_decrypt, 1379 .min_keysize = AES_MIN_KEY_SIZE * 2, 1380 .max_keysize = AES_MAX_KEY_SIZE * 2, 1381 .ivsize = AES_BLOCK_SIZE, 1382 }, 1383 .cipher_mode = DRV_CIPHER_BITLOCKER, 1384 .flow_mode = S_DIN_to_AES, 1385 .data_unit = 4096, 1386 .min_hw_rev = CC_HW_REV_712, 1387 .std_body = CC_STD_NIST, 1388 }, 1389 { 1390 .name = "ecb(aes)", 1391 .driver_name = "ecb-aes-ccree", 1392 .blocksize = AES_BLOCK_SIZE, 1393 .template_skcipher = { 1394 .setkey = cc_cipher_setkey, 1395 .encrypt = cc_cipher_encrypt, 1396 .decrypt = cc_cipher_decrypt, 1397 .min_keysize = AES_MIN_KEY_SIZE, 1398 .max_keysize = AES_MAX_KEY_SIZE, 1399 .ivsize = 0, 1400 }, 1401 .cipher_mode = DRV_CIPHER_ECB, 1402 .flow_mode = S_DIN_to_AES, 1403 .min_hw_rev = CC_HW_REV_630, 1404 .std_body = CC_STD_NIST, 1405 }, 1406 { 1407 .name = "cbc(aes)", 1408 .driver_name = "cbc-aes-ccree", 1409 .blocksize = AES_BLOCK_SIZE, 1410 .template_skcipher = { 1411 .setkey = cc_cipher_setkey, 1412 .encrypt = cc_cipher_encrypt, 1413 .decrypt = cc_cipher_decrypt, 1414 .min_keysize = AES_MIN_KEY_SIZE, 1415 .max_keysize = AES_MAX_KEY_SIZE, 1416 .ivsize = AES_BLOCK_SIZE, 1417 }, 1418 .cipher_mode = DRV_CIPHER_CBC, 1419 .flow_mode = S_DIN_to_AES, 1420 .min_hw_rev = CC_HW_REV_630, 1421 .std_body = CC_STD_NIST, 1422 }, 1423 { 1424 .name = "ofb(aes)", 1425 .driver_name = "ofb-aes-ccree", 1426 .blocksize = AES_BLOCK_SIZE, 1427 .template_skcipher = { 1428 .setkey = cc_cipher_setkey, 1429 .encrypt = cc_cipher_encrypt, 1430 .decrypt = cc_cipher_decrypt, 1431 .min_keysize = AES_MIN_KEY_SIZE, 1432 .max_keysize = AES_MAX_KEY_SIZE, 1433 .ivsize = AES_BLOCK_SIZE, 1434 }, 1435 .cipher_mode = DRV_CIPHER_OFB, 1436 .flow_mode = S_DIN_to_AES, 1437 .min_hw_rev = CC_HW_REV_630, 1438 .std_body = CC_STD_NIST, 1439 }, 1440 { 1441 .name = "cts(cbc(aes))", 1442 .driver_name = "cts-cbc-aes-ccree", 1443 .blocksize = AES_BLOCK_SIZE, 1444 .template_skcipher = { 1445 .setkey = cc_cipher_setkey, 1446 .encrypt = cc_cipher_encrypt, 1447 .decrypt = cc_cipher_decrypt, 1448 .min_keysize = AES_MIN_KEY_SIZE, 1449 .max_keysize = AES_MAX_KEY_SIZE, 1450 .ivsize = AES_BLOCK_SIZE, 1451 }, 1452 .cipher_mode = DRV_CIPHER_CBC_CTS, 1453 .flow_mode = S_DIN_to_AES, 1454 .min_hw_rev = CC_HW_REV_630, 1455 .std_body = CC_STD_NIST, 1456 }, 1457 { 1458 .name = "ctr(aes)", 1459 .driver_name = "ctr-aes-ccree", 1460 .blocksize = 1, 1461 .template_skcipher = { 1462 .setkey = cc_cipher_setkey, 1463 .encrypt = cc_cipher_encrypt, 1464 .decrypt = cc_cipher_decrypt, 1465 .min_keysize = AES_MIN_KEY_SIZE, 1466 .max_keysize = AES_MAX_KEY_SIZE, 1467 .ivsize = AES_BLOCK_SIZE, 1468 }, 1469 .cipher_mode = DRV_CIPHER_CTR, 1470 .flow_mode = S_DIN_to_AES, 1471 .min_hw_rev = CC_HW_REV_630, 1472 .std_body = CC_STD_NIST, 1473 }, 1474 { 1475 .name = "cbc(des3_ede)", 1476 .driver_name = "cbc-3des-ccree", 1477 .blocksize = DES3_EDE_BLOCK_SIZE, 1478 .template_skcipher = { 1479 .setkey = cc_cipher_setkey, 1480 .encrypt = cc_cipher_encrypt, 1481 .decrypt = cc_cipher_decrypt, 1482 .min_keysize = DES3_EDE_KEY_SIZE, 1483 .max_keysize = DES3_EDE_KEY_SIZE, 1484 .ivsize = DES3_EDE_BLOCK_SIZE, 1485 }, 1486 .cipher_mode = DRV_CIPHER_CBC, 1487 .flow_mode = S_DIN_to_DES, 1488 .min_hw_rev = CC_HW_REV_630, 1489 .std_body = CC_STD_NIST, 1490 }, 1491 { 1492 .name = "ecb(des3_ede)", 1493 .driver_name = "ecb-3des-ccree", 1494 .blocksize = DES3_EDE_BLOCK_SIZE, 1495 .template_skcipher = { 1496 .setkey = cc_cipher_setkey, 1497 .encrypt = cc_cipher_encrypt, 1498 .decrypt = cc_cipher_decrypt, 1499 .min_keysize = DES3_EDE_KEY_SIZE, 1500 .max_keysize = DES3_EDE_KEY_SIZE, 1501 .ivsize = 0, 1502 }, 1503 .cipher_mode = DRV_CIPHER_ECB, 1504 .flow_mode = S_DIN_to_DES, 1505 .min_hw_rev = CC_HW_REV_630, 1506 .std_body = CC_STD_NIST, 1507 }, 1508 { 1509 .name = "cbc(des)", 1510 .driver_name = "cbc-des-ccree", 1511 .blocksize = DES_BLOCK_SIZE, 1512 .template_skcipher = { 1513 .setkey = cc_cipher_setkey, 1514 .encrypt = cc_cipher_encrypt, 1515 .decrypt = cc_cipher_decrypt, 1516 .min_keysize = DES_KEY_SIZE, 1517 .max_keysize = DES_KEY_SIZE, 1518 .ivsize = DES_BLOCK_SIZE, 1519 }, 1520 .cipher_mode = DRV_CIPHER_CBC, 1521 .flow_mode = S_DIN_to_DES, 1522 .min_hw_rev = CC_HW_REV_630, 1523 .std_body = CC_STD_NIST, 1524 }, 1525 { 1526 .name = "ecb(des)", 1527 .driver_name = "ecb-des-ccree", 1528 .blocksize = DES_BLOCK_SIZE, 1529 .template_skcipher = { 1530 .setkey = cc_cipher_setkey, 1531 .encrypt = cc_cipher_encrypt, 1532 .decrypt = cc_cipher_decrypt, 1533 .min_keysize = DES_KEY_SIZE, 1534 .max_keysize = DES_KEY_SIZE, 1535 .ivsize = 0, 1536 }, 1537 .cipher_mode = DRV_CIPHER_ECB, 1538 .flow_mode = S_DIN_to_DES, 1539 .min_hw_rev = CC_HW_REV_630, 1540 .std_body = CC_STD_NIST, 1541 }, 1542 { 1543 .name = "cbc(sm4)", 1544 .driver_name = "cbc-sm4-ccree", 1545 .blocksize = SM4_BLOCK_SIZE, 1546 .template_skcipher = { 1547 .setkey = cc_cipher_setkey, 1548 .encrypt = cc_cipher_encrypt, 1549 .decrypt = cc_cipher_decrypt, 1550 .min_keysize = SM4_KEY_SIZE, 1551 .max_keysize = SM4_KEY_SIZE, 1552 .ivsize = SM4_BLOCK_SIZE, 1553 }, 1554 .cipher_mode = DRV_CIPHER_CBC, 1555 .flow_mode = S_DIN_to_SM4, 1556 .min_hw_rev = CC_HW_REV_713, 1557 .std_body = CC_STD_OSCCA, 1558 }, 1559 { 1560 .name = "ecb(sm4)", 1561 .driver_name = "ecb-sm4-ccree", 1562 .blocksize = SM4_BLOCK_SIZE, 1563 .template_skcipher = { 1564 .setkey = cc_cipher_setkey, 1565 .encrypt = cc_cipher_encrypt, 1566 .decrypt = cc_cipher_decrypt, 1567 .min_keysize = SM4_KEY_SIZE, 1568 .max_keysize = SM4_KEY_SIZE, 1569 .ivsize = 0, 1570 }, 1571 .cipher_mode = DRV_CIPHER_ECB, 1572 .flow_mode = S_DIN_to_SM4, 1573 .min_hw_rev = CC_HW_REV_713, 1574 .std_body = CC_STD_OSCCA, 1575 }, 1576 { 1577 .name = "ctr(sm4)", 1578 .driver_name = "ctr-sm4-ccree", 1579 .blocksize = SM4_BLOCK_SIZE, 1580 .template_skcipher = { 1581 .setkey = cc_cipher_setkey, 1582 .encrypt = cc_cipher_encrypt, 1583 .decrypt = cc_cipher_decrypt, 1584 .min_keysize = SM4_KEY_SIZE, 1585 .max_keysize = SM4_KEY_SIZE, 1586 .ivsize = SM4_BLOCK_SIZE, 1587 }, 1588 .cipher_mode = DRV_CIPHER_CTR, 1589 .flow_mode = S_DIN_to_SM4, 1590 .min_hw_rev = CC_HW_REV_713, 1591 .std_body = CC_STD_OSCCA, 1592 }, 1593 { 1594 .name = "cbc(psm4)", 1595 .driver_name = "cbc-psm4-ccree", 1596 .blocksize = SM4_BLOCK_SIZE, 1597 .template_skcipher = { 1598 .setkey = cc_cipher_sethkey, 1599 .encrypt = cc_cipher_encrypt, 1600 .decrypt = cc_cipher_decrypt, 1601 .min_keysize = CC_HW_KEY_SIZE, 1602 .max_keysize = CC_HW_KEY_SIZE, 1603 .ivsize = SM4_BLOCK_SIZE, 1604 }, 1605 .cipher_mode = DRV_CIPHER_CBC, 1606 .flow_mode = S_DIN_to_SM4, 1607 .min_hw_rev = CC_HW_REV_713, 1608 .std_body = CC_STD_OSCCA, 1609 .sec_func = true, 1610 }, 1611 { 1612 .name = "ctr(psm4)", 1613 .driver_name = "ctr-psm4-ccree", 1614 .blocksize = SM4_BLOCK_SIZE, 1615 .template_skcipher = { 1616 .setkey = cc_cipher_sethkey, 1617 .encrypt = cc_cipher_encrypt, 1618 .decrypt = cc_cipher_decrypt, 1619 .min_keysize = CC_HW_KEY_SIZE, 1620 .max_keysize = CC_HW_KEY_SIZE, 1621 .ivsize = SM4_BLOCK_SIZE, 1622 }, 1623 .cipher_mode = DRV_CIPHER_CTR, 1624 .flow_mode = S_DIN_to_SM4, 1625 .min_hw_rev = CC_HW_REV_713, 1626 .std_body = CC_STD_OSCCA, 1627 .sec_func = true, 1628 }, 1629 }; 1630 1631 static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl, 1632 struct device *dev) 1633 { 1634 struct cc_crypto_alg *t_alg; 1635 struct skcipher_alg *alg; 1636 1637 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL); 1638 if (!t_alg) 1639 return ERR_PTR(-ENOMEM); 1640 1641 alg = &t_alg->skcipher_alg; 1642 1643 memcpy(alg, &tmpl->template_skcipher, sizeof(*alg)); 1644 1645 snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name); 1646 snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s", 1647 tmpl->driver_name); 1648 alg->base.cra_module = THIS_MODULE; 1649 alg->base.cra_priority = CC_CRA_PRIO; 1650 alg->base.cra_blocksize = tmpl->blocksize; 1651 alg->base.cra_alignmask = 0; 1652 alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx); 1653 1654 alg->base.cra_init = cc_cipher_init; 1655 alg->base.cra_exit = cc_cipher_exit; 1656 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 1657 1658 t_alg->cipher_mode = tmpl->cipher_mode; 1659 t_alg->flow_mode = tmpl->flow_mode; 1660 t_alg->data_unit = tmpl->data_unit; 1661 1662 return t_alg; 1663 } 1664 1665 int cc_cipher_free(struct cc_drvdata *drvdata) 1666 { 1667 struct cc_crypto_alg *t_alg, *n; 1668 struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle; 1669 1670 if (cipher_handle) { 1671 /* Remove registered algs */ 1672 list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list, 1673 entry) { 1674 crypto_unregister_skcipher(&t_alg->skcipher_alg); 1675 list_del(&t_alg->entry); 1676 kfree(t_alg); 1677 } 1678 kfree(cipher_handle); 1679 drvdata->cipher_handle = NULL; 1680 } 1681 return 0; 1682 } 1683 1684 int cc_cipher_alloc(struct cc_drvdata *drvdata) 1685 { 1686 struct cc_cipher_handle *cipher_handle; 1687 struct cc_crypto_alg *t_alg; 1688 struct device *dev = drvdata_to_dev(drvdata); 1689 int rc = -ENOMEM; 1690 int alg; 1691 1692 cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL); 1693 if (!cipher_handle) 1694 return -ENOMEM; 1695 1696 INIT_LIST_HEAD(&cipher_handle->alg_list); 1697 drvdata->cipher_handle = cipher_handle; 1698 1699 /* Linux crypto */ 1700 dev_dbg(dev, "Number of algorithms = %zu\n", 1701 ARRAY_SIZE(skcipher_algs)); 1702 for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) { 1703 if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) || 1704 !(drvdata->std_bodies & skcipher_algs[alg].std_body) || 1705 (drvdata->sec_disabled && skcipher_algs[alg].sec_func)) 1706 continue; 1707 1708 dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name); 1709 t_alg = cc_create_alg(&skcipher_algs[alg], dev); 1710 if (IS_ERR(t_alg)) { 1711 rc = PTR_ERR(t_alg); 1712 dev_err(dev, "%s alg allocation failed\n", 1713 skcipher_algs[alg].driver_name); 1714 goto fail0; 1715 } 1716 t_alg->drvdata = drvdata; 1717 1718 dev_dbg(dev, "registering %s\n", 1719 skcipher_algs[alg].driver_name); 1720 rc = crypto_register_skcipher(&t_alg->skcipher_alg); 1721 dev_dbg(dev, "%s alg registration rc = %x\n", 1722 t_alg->skcipher_alg.base.cra_driver_name, rc); 1723 if (rc) { 1724 dev_err(dev, "%s alg registration failed\n", 1725 t_alg->skcipher_alg.base.cra_driver_name); 1726 kfree(t_alg); 1727 goto fail0; 1728 } else { 1729 list_add_tail(&t_alg->entry, 1730 &cipher_handle->alg_list); 1731 dev_dbg(dev, "Registered %s\n", 1732 t_alg->skcipher_alg.base.cra_driver_name); 1733 } 1734 } 1735 return 0; 1736 1737 fail0: 1738 cc_cipher_free(drvdata); 1739 return rc; 1740 } 1741