// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2018 ARM Limited or its affiliates. */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/des.h>
#include <crypto/xts.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_ABLKCIPHER_SEQ_LEN 6

#define template_skcipher	template_u.skcipher

#define CC_MIN_AES_XTS_SIZE 0x10
#define CC_MAX_AES_XTS_SIZE 0x2000

struct cc_cipher_handle {
	struct list_head alg_list;
};

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int key_round_number;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	bool hw_key;
	struct cc_user_key_info user;
	struct cc_hw_key_info hw;
	struct crypto_shash *shash_tfm;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline bool cc_is_hw_key(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->hw_key;
}

static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
			if (size >= CC_MIN_AES_XTS_SIZE &&
			    size <= CC_MAX_AES_XTS_SIZE &&
			    IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	int rc = 0;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		return -ENOMEM;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	return rc;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
			container_of(alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}

struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};

static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}

static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the hardware key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported HW key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (ctx_p->flow_mode != S_DIN_to_AES) {
		dev_err(dev, "HW key not supported for non-AES flows\n");
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
	if (ctx_p->hw.key1_slot == END_OF_KEYS) {
		dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
	    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
	    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
		if (hki.hw_key1 == hki.hw_key2) {
			dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
				hki.hw_key1, hki.hw_key2);
			return -EINVAL;
		}
		ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
		if (ctx_p->hw.key2_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key2 number (%d)\n",
				hki.hw_key2);
			return -EINVAL;
		}
	}

	ctx_p->keylen = keylen;
	ctx_p->hw_key = true;
	dev_dbg(dev, "cc_is_hw_key ret 0");

	return 0;
}

static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	u32 tmp[DES3_EDE_EXPKEY_WORDS];
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw_key = false;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if (keylen == DES3_EDE_KEY_SIZE &&
		    __des3_ede_setkey(tmp, &tfm->crt_flags, key,
				      DES3_EDE_KEY_SIZE)) {
			dev_dbg(dev, "weak 3DES key");
			return -EINVAL;
		} else if (!des_ekey(tmp, key) &&
			   (crypto_tfm_get_flags(tfm) &
			    CRYPTO_TFM_REQ_WEAK_KEY)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);

		desc->tfm = ctx_p->shash_tfm;

		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
					  ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}

static void cc_setup_cipher_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, unsigned int nbytes,
				 struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load cipher state */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		/*FALLTHROUGH*/
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (flow_mode == S_DIN_to_AES) {
			if (cc_is_hw_key(tfm)) {
				set_hw_crypto_key(&desc[*seq_size],
						  ctx_p->hw.key1_slot);
			} else {
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, ((key_len == 24) ?
							    AES_MAX_KEY_SIZE :
							    key_len), NS_BIT);
			}
			set_key_size_aes(&desc[*seq_size], key_len);
		} else {
			/* des */
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     key_len, NS_BIT);
			set_key_size_des(&desc[*seq_size], key_len);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;

		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Set state */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static void cc_setup_cipher_data(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes,
				 void *areq, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = ctx_p->flow_mode;

	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		flow_mode = DIN_AES_DOUT;
		break;
	case S_DIN_to_DES:
		flow_mode = DIN_DES_DOUT;
		break;
	default:
		dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode);
		return;
	}
	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
0 : 1)); 543 if (areq) 544 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); 545 546 set_flow_mode(&desc[*seq_size], flow_mode); 547 (*seq_size)++; 548 } else { 549 /* bypass */ 550 dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n", 551 &req_ctx->mlli_params.mlli_dma_addr, 552 req_ctx->mlli_params.mlli_len, 553 (unsigned int)ctx_p->drvdata->mlli_sram_addr); 554 hw_desc_init(&desc[*seq_size]); 555 set_din_type(&desc[*seq_size], DMA_DLLI, 556 req_ctx->mlli_params.mlli_dma_addr, 557 req_ctx->mlli_params.mlli_len, NS_BIT); 558 set_dout_sram(&desc[*seq_size], 559 ctx_p->drvdata->mlli_sram_addr, 560 req_ctx->mlli_params.mlli_len); 561 set_flow_mode(&desc[*seq_size], BYPASS); 562 (*seq_size)++; 563 564 hw_desc_init(&desc[*seq_size]); 565 set_din_type(&desc[*seq_size], DMA_MLLI, 566 ctx_p->drvdata->mlli_sram_addr, 567 req_ctx->in_mlli_nents, NS_BIT); 568 if (req_ctx->out_nents == 0) { 569 dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n", 570 (unsigned int)ctx_p->drvdata->mlli_sram_addr, 571 (unsigned int)ctx_p->drvdata->mlli_sram_addr); 572 set_dout_mlli(&desc[*seq_size], 573 ctx_p->drvdata->mlli_sram_addr, 574 req_ctx->in_mlli_nents, NS_BIT, 575 (!areq ? 0 : 1)); 576 } else { 577 dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n", 578 (unsigned int)ctx_p->drvdata->mlli_sram_addr, 579 (unsigned int)ctx_p->drvdata->mlli_sram_addr + 580 (u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents); 581 set_dout_mlli(&desc[*seq_size], 582 (ctx_p->drvdata->mlli_sram_addr + 583 (LLI_ENTRY_BYTE_SIZE * 584 req_ctx->in_mlli_nents)), 585 req_ctx->out_mlli_nents, NS_BIT, 586 (!areq ? 0 : 1)); 587 } 588 if (areq) 589 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); 590 591 set_flow_mode(&desc[*seq_size], flow_mode); 592 (*seq_size)++; 593 } 594 } 595 596 static void cc_cipher_complete(struct device *dev, void *cc_req, int err) 597 { 598 struct skcipher_request *req = (struct skcipher_request *)cc_req; 599 struct scatterlist *dst = req->dst; 600 struct scatterlist *src = req->src; 601 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); 602 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 603 unsigned int ivsize = crypto_skcipher_ivsize(tfm); 604 605 cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); 606 kzfree(req_ctx->iv); 607 608 /* 609 * The crypto API expects us to set the req->iv to the last 610 * ciphertext block. For encrypt, simply copy from the result. 611 * For decrypt, we must copy from a saved buffer since this 612 * could be an in-place decryption operation and the src is 613 * lost by this point. 
	 */
	if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) {
		memcpy(req->iv, req_ctx->backup_info, ivsize);
		kzfree(req_ctx->backup_info);
	} else if (!err) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 (req->cryptlen - ivsize),
					 ivsize, 0);
	}

	skcipher_request_complete(req, err);
}

static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc, cts_restore_flag = 0;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		 "Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* For CTS in case of data size aligned to 16 use CBC mode */
	if (((nbytes % AES_BLOCK_SIZE) == 0) &&
	    ctx_p->cipher_mode == DRV_CIPHER_CBC_CTS) {
		ctx_p->cipher_mode = DRV_CIPHER_CBC;
		cts_restore_flag = 1;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

#ifdef ENABLE_CYCLE_COUNT
	cc_req.op_type = (direction == DRV_CRYPTO_DIRECTION_DECRYPT) ?
		STAT_OP_TYPE_DECODE : STAT_OP_TYPE_ENCODE;

#endif

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup processing */
	cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc,
			     &seq_len);

	/* do we need to generate IV? */
	if (req_ctx->is_giv) {
		cc_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr;
		cc_req.ivgen_dma_addr_len = 1;
		/* set the IV size (8/16 B long) */
		cc_req.ivgen_size = ivsize;
	}

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (cts_restore_flag)
		ctx_p->cipher_mode = DRV_CIPHER_CBC_CTS;

	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kzfree(req_ctx->backup_info);
		kzfree(req_ctx->iv);
	}

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	req_ctx->is_giv = false;
	req_ctx->backup_info = NULL;

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	gfp_t flags = cc_gfp_flags(&req->base);

	/*
	 * Allocate and save the last IV sized bytes of the source, which will
	 * be lost in case of in-place decryption and might be needed for CTS.
	 */
	req_ctx->backup_info = kmalloc(ivsize, flags);
	if (!req_ctx->backup_info)
		return -ENOMEM;

	scatterwalk_map_and_copy(req_ctx->backup_info, req->src,
				 (req->cryptlen - ivsize), ivsize, 0);
	req_ctx->is_giv = false;

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "cts1(cbc(paes))",
		.driver_name = "cts1-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cts1(cbc(aes))",
		.driver_name = "cts1-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
		},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
};

static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
				CRYPTO_ALG_TYPE_SKCIPHER;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;
	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;

	if (cipher_handle) {
		/* Remove registered algs */
		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
					 entry) {
			crypto_unregister_skcipher(&t_alg->skcipher_alg);
			list_del(&t_alg->entry);
			kfree(t_alg);
		}
		kfree(cipher_handle);
		drvdata->cipher_handle = NULL;
	}
	return 0;
}

int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_cipher_handle *cipher_handle;
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
	if (!cipher_handle)
		return -ENOMEM;

	INIT_LIST_HEAD(&cipher_handle->alg_list);
	drvdata->cipher_handle = cipher_handle;

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if (skcipher_algs[alg].min_hw_rev > drvdata->hw_rev)
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			kfree(t_alg);
			goto fail0;
		} else {
			list_add_tail(&t_alg->entry,
				      &cipher_handle->alg_list);
			dev_dbg(dev, "Registered %s\n",
				t_alg->skcipher_alg.base.cra_driver_name);
		}
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}
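
/*
 * Illustrative sketch only, not part of the driver: roughly how kernel code
 * reaches one of the skciphers registered above through the generic crypto
 * API. The function name cc_cipher_usage_sketch, the "cbc-aes-ccree" driver
 * name (registered in skcipher_algs[] above), the all-zero key/IV and the
 * single-block in-place buffer are arbitrary example values, not requirements
 * of this driver.
 */
static int __maybe_unused cc_cipher_usage_sketch(void)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	u8 key[AES_MAX_KEY_SIZE] = { 0 };	/* example key material */
	u8 iv[AES_BLOCK_SIZE] = { 0 };		/* example IV */
	u8 *buf;
	int rc;

	/* Bind to the CBC-AES implementation registered by this driver */
	tfm = crypto_alloc_skcipher("cbc-aes-ccree", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Data is DMA-mapped by the driver, so don't use a stack buffer */
	buf = kzalloc(AES_BLOCK_SIZE, GFP_KERNEL);
	if (!buf) {
		rc = -ENOMEM;
		goto free_tfm;
	}

	rc = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (rc)
		goto free_buf;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		rc = -ENOMEM;
		goto free_buf;
	}

	sg_init_one(&sg, buf, AES_BLOCK_SIZE);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, AES_BLOCK_SIZE, iv);

	/* Requests complete asynchronously via cc_cipher_complete() */
	rc = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
free_buf:
	kfree(buf);
free_tfm:
	crypto_free_skcipher(tfm);
	return rc;
}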