// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2018 ARM Limited or its affiliates. */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_ABLKCIPHER_SEQ_LEN 6

#define template_skcipher template_u.skcipher

struct cc_cipher_handle {
	struct list_head alg_list;
};

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int key_round_number;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	bool hw_key;
	struct cc_user_key_info user;
	struct cc_hw_key_info hw;
	struct crypto_shash *shash_tfm;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline bool cc_is_hw_key(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->hw_key;
}

static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	case S_DIN_to_SM4:
		if (size == SM4_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
			if (size >= AES_BLOCK_SIZE &&
			    IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	case S_DIN_to_SM4:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	default:
		break;
	}
	return -EINVAL;
}
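
/*
 * Per-tfm context setup: allocate a DMA-able key buffer sized for the
 * algorithm's maximum key length, map it for device access and, for ESSIV,
 * allocate the software SHA-256 transform used to derive the second key.
 */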
static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	int rc = 0;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		rc = -ENOMEM;
		goto free_key;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			rc = PTR_ERR(ctx_p->shash_tfm);
			goto unmap_key;
		}
	}

	return 0;

unmap_key:
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
free_key:
	kfree(ctx_p->user.key);
	return rc;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
		container_of(alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
	kzfree(ctx_p->user.key);
}

struct tdes_keys {
	u8 key1[DES_KEY_SIZE];
	u8 key2[DES_KEY_SIZE];
	u8 key3[DES_KEY_SIZE];
};

static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}
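
/*
 * Set a hardware-protected key. For the *(paes) algorithms the "key" passed
 * in is not key material but a token naming one or two on-chip key slots
 * (hw_key1/hw_key2) plus the effective key length. Only the token fields
 * used below are relied on here; the token layout itself comes from the
 * driver headers.
 */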
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the hardware key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported HW key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (ctx_p->flow_mode != S_DIN_to_AES) {
		dev_err(dev, "HW key not supported for non-AES flows\n");
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
	if (ctx_p->hw.key1_slot == END_OF_KEYS) {
		dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
	    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
	    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
		if (hki.hw_key1 == hki.hw_key2) {
			dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
				hki.hw_key1, hki.hw_key2);
			return -EINVAL;
		}
		ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
		if (ctx_p->hw.key2_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key2 number (%d)\n",
				hki.hw_key2);
			return -EINVAL;
		}
	}

	ctx_p->keylen = keylen;
	ctx_p->hw_key = true;
	dev_dbg(dev, "cc_is_hw_key ret 0");

	return 0;
}

static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	u32 tmp[DES3_EDE_EXPKEY_WORDS];
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw_key = false;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if (keylen == DES3_EDE_KEY_SIZE &&
		    __des3_ede_setkey(tmp, &tfm->crt_flags, key,
				      DES3_EDE_KEY_SIZE)) {
			dev_dbg(dev, "weak 3DES key");
			return -EINVAL;
		} else if (!des_ekey(tmp, key) &&
			   (crypto_tfm_get_flags(tfm) &
			    CRYPTO_TFM_REQ_WEAK_KEY)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24) {
		/* The HW loads 192-bit AES keys from a 256-bit buffer (see
		 * the key load in cc_setup_cipher_desc()), so zero the tail.
		 */
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);

		desc->tfm = ctx_p->shash_tfm;

		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
					  ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}
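
/*
 * Build the setup portion of the HW descriptor sequence: load the cipher
 * state (IV/counter) for modes that need one, then load the key. XTS,
 * ESSIV and BitLocker additionally load the second key half as the XEX
 * key with the configured data unit size before loading the tweak.
 */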
static void cc_setup_cipher_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, unsigned int nbytes,
				 struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load cipher state */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		/* FALLTHROUGH */
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (flow_mode == S_DIN_to_AES) {
			if (cc_is_hw_key(tfm)) {
				set_hw_crypto_key(&desc[*seq_size],
						  ctx_p->hw.key1_slot);
			} else {
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, ((key_len == 24) ?
							    AES_MAX_KEY_SIZE :
							    key_len), NS_BIT);
			}
			set_key_size_aes(&desc[*seq_size], key_len);
		} else {
			/* des */
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     key_len, NS_BIT);
			set_key_size_des(&desc[*seq_size], key_len);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;

		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Set state */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}
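
/*
 * Build the data-movement portion of the sequence. A direct (DLLI) mapping
 * is handled with a single DIN/DOUT descriptor; scattered buffers go
 * through an MLLI table that is first copied into SRAM with a BYPASS
 * descriptor and then referenced by the processing descriptor.
 */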
static void cc_setup_cipher_data(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes,
				 void *areq, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = ctx_p->flow_mode;

	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		flow_mode = DIN_AES_DOUT;
		break;
	case S_DIN_to_DES:
		flow_mode = DIN_DES_DOUT;
		break;
	case S_DIN_to_SM4:
		flow_mode = DIN_SM4_DOUT;
		break;
	default:
		dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode);
		return;
	}
	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!areq ? 0 : 1));
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;

		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		}
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

/*
 * Update a CTR-AES 128 bit counter by @increment blocks. The counter is
 * treated as two big-endian 64-bit words: the low word is incremented and
 * any carry is propagated into the high word. If the counter buffer may be
 * unaligned and the platform lacks efficient unaligned access, fall back
 * to a byte-wise increment.
 */
static void cc_update_ctr(u8 *ctr, unsigned int increment)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)ctr, 8)) {
		__be64 *high_be = (__be64 *)ctr;
		__be64 *low_be = high_be + 1;
		u64 orig_low = __be64_to_cpu(*low_be);
		u64 new_low = orig_low + (u64)increment;

		*low_be = __cpu_to_be64(new_low);

		if (new_low < orig_low)
			*high_be = __cpu_to_be64(__be64_to_cpu(*high_be) + 1);
	} else {
		u8 *pos = (ctr + AES_BLOCK_SIZE);
		u8 val;
		unsigned int size;

		for (; increment; increment--)
			for (size = AES_BLOCK_SIZE; size; size--) {
				val = *--pos + 1;
				*pos = val;
				if (val)
					break;
			}
	}
}
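
/*
 * Request completion handler: fix up req->iv as the crypto API expects
 * (last ciphertext block for CBC, advanced counter for CTR), unmap the
 * request buffers and free the DMA-safe IV copy before completing.
 */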
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	unsigned int len;

	switch (ctx_p->cipher_mode) {
	case DRV_CIPHER_CBC:
		/*
		 * The crypto API expects us to set the req->iv to the last
		 * ciphertext block. For encrypt, simply copy from the result.
		 * For decrypt, we must copy from a saved buffer since this
		 * could be an in-place decryption operation and the src is
		 * lost by this point.
		 */
		if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) {
			memcpy(req->iv, req_ctx->backup_info, ivsize);
			kzfree(req_ctx->backup_info);
		} else if (!err) {
			len = req->cryptlen - ivsize;
			scatterwalk_map_and_copy(req->iv, req->dst, len,
						 ivsize, 0);
		}
		break;

	case DRV_CIPHER_CTR:
		/* Compute the counter of the last block */
		len = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE;
		cc_update_ctr((u8 *)req->iv, len);
		break;

	default:
		break;
	}

	cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	kzfree(req_ctx->iv);

	skcipher_request_complete(req, err);
}
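
/*
 * Common encrypt/decrypt path: validate the data size for the mode, copy
 * the IV to a DMA-able buffer, map the source/destination buffers, build
 * the descriptor sequence and queue it to the HW. Cleanup happens in
 * cc_cipher_complete() or, on synchronous failure, in the exit path below.
 */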
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		 "Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* Check the data length according to the mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup processing */
	cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc,
			     &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kzfree(req_ctx->backup_info);
		kzfree(req_ctx->iv);
	}

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	gfp_t flags = cc_gfp_flags(&req->base);
	unsigned int len;

	memset(req_ctx, 0, sizeof(*req_ctx));

	if (ctx_p->cipher_mode == DRV_CIPHER_CBC) {
		/* Allocate and save the last IV sized bytes of the source,
		 * which will be lost in case of in-place decryption.
		 */
		req_ctx->backup_info = kzalloc(ivsize, flags);
		if (!req_ctx->backup_info)
			return -ENOMEM;

		len = req->cryptlen - ivsize;
		scatterwalk_map_and_copy(req_ctx->backup_info, req->src, len,
					 ivsize, 0);
	}

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}
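
/*
 * Block cipher algorithm templates. The *(paes) entries take a HW key
 * token (cc_cipher_sethkey) instead of key material; data_unit selects a
 * fixed XTS/ESSIV/BitLocker data unit size, while min_hw_rev and std_body
 * gate registration on the detected hardware revision and the enabled
 * standards bodies (see cc_cipher_alloc()).
 */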
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(sm4)",
		.driver_name = "cbc-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ecb(sm4)",
		.driver_name = "ecb-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ctr(sm4)",
		.driver_name = "ctr-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
};
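
/*
 * Instantiate a skcipher_alg from a template; cc_cipher_alloc() below
 * registers every template supported by the probed hardware revision and
 * standards configuration, and cc_cipher_free() tears them down again.
 */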
static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;
	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;

	if (cipher_handle) {
		/* Remove registered algs */
		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
					 entry) {
			crypto_unregister_skcipher(&t_alg->skcipher_alg);
			list_del(&t_alg->entry);
			kfree(t_alg);
		}
		kfree(cipher_handle);
		drvdata->cipher_handle = NULL;
	}
	return 0;
}

int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_cipher_handle *cipher_handle;
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
	if (!cipher_handle)
		return -ENOMEM;

	INIT_LIST_HEAD(&cipher_handle->alg_list);
	drvdata->cipher_handle = cipher_handle;

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
		    !(drvdata->std_bodies & skcipher_algs[alg].std_body))
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			kfree(t_alg);
			goto fail0;
		}

		list_add_tail(&t_alg->entry, &cipher_handle->alg_list);
		dev_dbg(dev, "Registered %s\n",
			t_alg->skcipher_alg.base.cra_driver_name);
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}