// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2018 ARM Limited or its affiliates. */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_ABLKCIPHER_SEQ_LEN 6

#define template_skcipher template_u.skcipher

struct cc_cipher_handle {
	struct list_head alg_list;
};

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int key_round_number;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	bool hw_key;
	struct cc_user_key_info user;
	struct cc_hw_key_info hw;
	struct crypto_shash *shash_tfm;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline bool cc_is_hw_key(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->hw_key;
}

static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	case S_DIN_to_SM4:
		if (size == SM4_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
			if (size >= AES_BLOCK_SIZE &&
			    IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	case S_DIN_to_SM4:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	default:
		break;
	}
	return -EINVAL;
}
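
/*
 * Note on the double-length key sizes accepted above: for XTS, ESSIV and
 * BITLOCKER modes the caller supplies two AES keys concatenated
 * (key1 || key2), so e.g. a 64 byte key is really two 256 bit keys. The
 * descriptor setup code below programs each half separately, using
 * key_len / 2 for the effective AES key size.
 */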

static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	int rc;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		rc = -ENOMEM;
		goto free_key;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			rc = PTR_ERR(ctx_p->shash_tfm);
			goto unmap_key;
		}
	}

	return 0;

unmap_key:
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
free_key:
	kfree(ctx_p->user.key);
	return rc;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
		container_of(alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		ctx_p, crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}

struct tdes_keys {
	u8 key1[DES_KEY_SIZE];
	u8 key2[DES_KEY_SIZE];
	u8 key3[DES_KEY_SIZE];
};

static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}
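
/*
 * Note: the *(paes) algorithms registered below never see raw key
 * material. Their setkey handler receives a small token
 * (struct cc_hkey_info) naming one or two of the four hardware key
 * slots (KFDE0..KFDE3) plus the effective key length;
 * cc_slot_to_hw_key() above maps the slot number to the HW descriptor
 * encoding.
 */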

static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the hardware key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported HW key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (ctx_p->flow_mode != S_DIN_to_AES) {
		dev_err(dev, "HW key not supported for non-AES flows\n");
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
	if (ctx_p->hw.key1_slot == END_OF_KEYS) {
		dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
	    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
	    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
		if (hki.hw_key1 == hki.hw_key2) {
			dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
				hki.hw_key1, hki.hw_key2);
			return -EINVAL;
		}
		ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
		if (ctx_p->hw.key2_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key2 number (%d)\n",
				hki.hw_key2);
			return -EINVAL;
		}
	}

	ctx_p->keylen = keylen;
	ctx_p->hw_key = true;
	dev_dbg(dev, "cc_is_hw_key ret 0\n");

	return 0;
}
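
/*
 * Software (non-protected) key path: the raw key is validated, copied
 * into the DMA-mapped context buffer and, for ESSIV, the second half of
 * the buffer is overwritten with SHA-256 of the first half so the HW can
 * load it as the IV-encryption key.
 */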
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	u32 tmp[DES3_EDE_EXPKEY_WORDS];
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw_key = false;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if (keylen == DES3_EDE_KEY_SIZE &&
		    __des3_ede_setkey(tmp, &tfm->crt_flags, key,
				      DES3_EDE_KEY_SIZE)) {
			dev_dbg(dev, "weak 3DES key");
			return -EINVAL;
		} else if (!des_ekey(tmp, key) &&
			   (crypto_tfm_get_flags(tfm) &
			    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);

		desc->tfm = ctx_p->shash_tfm;

		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
					  ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely\n");
	return 0;
}
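
/*
 * Build the setup part of the HW descriptor sequence: load the IV/state,
 * load the key(s) and, for the XEX based modes, load the second key and
 * the tweak. Together with the data descriptor(s) added by
 * cc_setup_cipher_data() this stays within MAX_ABLKCIPHER_SEQ_LEN
 * (XTS/ESSIV/BITLOCKER use 3 setup descriptors, CBC/CTR/OFB 2, ECB 1).
 */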
static void cc_setup_cipher_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, unsigned int nbytes,
				 struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load cipher state */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		/*FALLTHROUGH*/
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (flow_mode == S_DIN_to_AES) {
			if (cc_is_hw_key(tfm)) {
				set_hw_crypto_key(&desc[*seq_size],
						  ctx_p->hw.key1_slot);
			} else {
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, ((key_len == 24) ?
							    AES_MAX_KEY_SIZE :
							    key_len), NS_BIT);
			}
			set_key_size_aes(&desc[*seq_size], key_len);
		} else {
			/*des*/
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     key_len, NS_BIT);
			set_key_size_des(&desc[*seq_size], key_len);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;

		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Set state */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}
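
/*
 * Add the data-movement descriptor(s). For a contiguous mapping
 * (CC_DMA_BUF_DLLI) a single DIN->DOUT descriptor is enough; for
 * scattered buffers the MLLI table is first copied into SRAM via a
 * BYPASS descriptor and the data descriptor then references it there.
 */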
static void cc_setup_cipher_data(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes,
				 void *areq, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = ctx_p->flow_mode;

	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		flow_mode = DIN_AES_DOUT;
		break;
	case S_DIN_to_DES:
		flow_mode = DIN_DES_DOUT;
		break;
	case S_DIN_to_SM4:
		flow_mode = DIN_SM4_DOUT;
		break;
	default:
		dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode);
		return;
	}
	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!areq ? 0 : 1));
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;

		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		}
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

/*
 * Update a CTR-AES 128 bit counter
 */
static void cc_update_ctr(u8 *ctr, unsigned int increment)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)ctr, 8)) {

		__be64 *high_be = (__be64 *)ctr;
		__be64 *low_be = high_be + 1;
		u64 orig_low = __be64_to_cpu(*low_be);
		u64 new_low = orig_low + (u64)increment;

		*low_be = __cpu_to_be64(new_low);

		if (new_low < orig_low)
			*high_be = __cpu_to_be64(__be64_to_cpu(*high_be) + 1);
	} else {
		u8 *pos = (ctr + AES_BLOCK_SIZE);
		u8 val;
		unsigned int size;

		for (; increment; increment--)
			for (size = AES_BLOCK_SIZE; size; size--) {
				val = *--pos + 1;
				*pos = val;
				if (val)
					break;
			}
	}
}
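
/*
 * Worked example for cc_update_ctr(): with increment == 2, a counter
 * block whose low 64 bits are ff ff ff ff ff ff ff ff ends up as
 * 00 00 00 00 00 00 00 01 and the carry propagates into the high
 * 64 bits, matching what the HW counter reaches after processing two
 * more AES blocks.
 */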
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	unsigned int len;

	cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);

	switch (ctx_p->cipher_mode) {
	case DRV_CIPHER_CBC:
		/*
		 * The crypto API expects us to set the req->iv to the last
		 * ciphertext block. For encrypt, simply copy from the result.
		 * For decrypt, we must copy from a saved buffer since this
		 * could be an in-place decryption operation and the src is
		 * lost by this point.
		 */
		if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) {
			memcpy(req->iv, req_ctx->backup_info, ivsize);
			kzfree(req_ctx->backup_info);
		} else if (!err) {
			len = req->cryptlen - ivsize;
			scatterwalk_map_and_copy(req->iv, req->dst, len,
						 ivsize, 0);
		}
		break;

	case DRV_CIPHER_CTR:
		/* Compute the counter of the last block */
		len = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE;
		cc_update_ctr((u8 *)req->iv, len);
		break;

	default:
		break;
	}

	kzfree(req_ctx->iv);

	skcipher_request_complete(req, err);
}
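
/*
 * Common handler for both directions: validate the request, copy the IV
 * to a DMA-safe buffer, map the data buffers, build the descriptor
 * sequence and queue it. Completion (or synchronous failure) funnels
 * through cc_cipher_complete() above.
 */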
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		 "Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup processing */
	cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc,
			     &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kzfree(req_ctx->backup_info);
		kzfree(req_ctx->iv);
	}

	return rc;
}
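
/*
 * Note on return codes: cc_send_request() reports -EINPROGRESS (or
 * -EBUSY for a backlogged request) once the descriptors are queued;
 * anything else is a synchronous failure, in which case the
 * exit_process path above frees the IV copy and backup_info itself,
 * since cc_cipher_complete() will never run for this request.
 */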
static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	gfp_t flags = cc_gfp_flags(&req->base);
	unsigned int len;

	memset(req_ctx, 0, sizeof(*req_ctx));

	if ((ctx_p->cipher_mode == DRV_CIPHER_CBC) &&
	    (req->cryptlen >= ivsize)) {

		/* Allocate and save the last IV sized bytes of the source,
		 * which will be lost in case of in-place decryption.
		 */
		req_ctx->backup_info = kzalloc(ivsize, flags);
		if (!req_ctx->backup_info)
			return -ENOMEM;

		len = req->cryptlen - ivsize;
		scatterwalk_map_and_copy(req_ctx->backup_info, req->src, len,
					 ivsize, 0);
	}

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
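	/*
	 * The du512/du4096 variants are identical to the base mode except
	 * that data_unit pins the XEX data-unit size to 512 or 4096 bytes
	 * (a disk sector) instead of deriving it from the request length;
	 * see the du_size handling in cc_setup_cipher_desc().
	 */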
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
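	/*
	 * End of the protected-key (paes) variants; the algorithms below
	 * take ordinary raw keys through cc_cipher_setkey().
	 */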
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(sm4)",
		.driver_name = "cbc-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ecb(sm4)",
		.driver_name = "ecb-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ctr(sm4)",
		.driver_name = "ctr-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
};
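
/*
 * Turn a cc_alg_template entry into a registerable skcipher_alg. The
 * template only carries the per-mode differences; everything generic
 * (module, priority, context size, init/exit, the ASYNC and
 * KERN_DRIVER_ONLY flags) is filled in here, and the registered
 * cra_driver_name (e.g. "cbc-aes-ccree") is what shows up in
 * /proc/crypto.
 */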

static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;
	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;

	if (cipher_handle) {
		/* Remove registered algs */
		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
					 entry) {
			crypto_unregister_skcipher(&t_alg->skcipher_alg);
			list_del(&t_alg->entry);
			kfree(t_alg);
		}
		kfree(cipher_handle);
		drvdata->cipher_handle = NULL;
	}
	return 0;
}

int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_cipher_handle *cipher_handle;
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
	if (!cipher_handle)
		return -ENOMEM;

	INIT_LIST_HEAD(&cipher_handle->alg_list);
	drvdata->cipher_handle = cipher_handle;

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
		    !(drvdata->std_bodies & skcipher_algs[alg].std_body))
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			kfree(t_alg);
			goto fail0;
		} else {
			list_add_tail(&t_alg->entry,
				      &cipher_handle->alg_list);
			dev_dbg(dev, "Registered %s\n",
				t_alg->skcipher_alg.base.cra_driver_name);
		}
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}