1 // SPDX-License-Identifier: GPL-2.0 2 /* Copyright (C) 2012-2018 ARM Limited or its affiliates. */ 3 4 #include <linux/kernel.h> 5 #include <linux/module.h> 6 #include <crypto/algapi.h> 7 #include <crypto/internal/skcipher.h> 8 #include <crypto/des.h> 9 #include <crypto/xts.h> 10 #include <crypto/scatterwalk.h> 11 12 #include "cc_driver.h" 13 #include "cc_lli_defs.h" 14 #include "cc_buffer_mgr.h" 15 #include "cc_cipher.h" 16 #include "cc_request_mgr.h" 17 18 #define MAX_ABLKCIPHER_SEQ_LEN 6 19 20 #define template_skcipher template_u.skcipher 21 22 #define CC_MIN_AES_XTS_SIZE 0x10 23 #define CC_MAX_AES_XTS_SIZE 0x2000 24 struct cc_cipher_handle { 25 struct list_head alg_list; 26 }; 27 28 struct cc_user_key_info { 29 u8 *key; 30 dma_addr_t key_dma_addr; 31 }; 32 33 struct cc_hw_key_info { 34 enum cc_hw_crypto_key key1_slot; 35 enum cc_hw_crypto_key key2_slot; 36 }; 37 38 struct cc_cipher_ctx { 39 struct cc_drvdata *drvdata; 40 int keylen; 41 int key_round_number; 42 int cipher_mode; 43 int flow_mode; 44 unsigned int flags; 45 bool hw_key; 46 struct cc_user_key_info user; 47 struct cc_hw_key_info hw; 48 struct crypto_shash *shash_tfm; 49 }; 50 51 static void cc_cipher_complete(struct device *dev, void *cc_req, int err); 52 53 static inline bool cc_is_hw_key(struct crypto_tfm *tfm) 54 { 55 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 56 57 return ctx_p->hw_key; 58 } 59 60 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size) 61 { 62 switch (ctx_p->flow_mode) { 63 case S_DIN_to_AES: 64 switch (size) { 65 case CC_AES_128_BIT_KEY_SIZE: 66 case CC_AES_192_BIT_KEY_SIZE: 67 if (ctx_p->cipher_mode != DRV_CIPHER_XTS && 68 ctx_p->cipher_mode != DRV_CIPHER_ESSIV && 69 ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER) 70 return 0; 71 break; 72 case CC_AES_256_BIT_KEY_SIZE: 73 return 0; 74 case (CC_AES_192_BIT_KEY_SIZE * 2): 75 case (CC_AES_256_BIT_KEY_SIZE * 2): 76 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || 77 ctx_p->cipher_mode == DRV_CIPHER_ESSIV || 78 ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) 79 return 0; 80 break; 81 default: 82 break; 83 } 84 case S_DIN_to_DES: 85 if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE) 86 return 0; 87 break; 88 default: 89 break; 90 } 91 return -EINVAL; 92 } 93 94 static int validate_data_size(struct cc_cipher_ctx *ctx_p, 95 unsigned int size) 96 { 97 switch (ctx_p->flow_mode) { 98 case S_DIN_to_AES: 99 switch (ctx_p->cipher_mode) { 100 case DRV_CIPHER_XTS: 101 if (size >= CC_MIN_AES_XTS_SIZE && 102 size <= CC_MAX_AES_XTS_SIZE && 103 IS_ALIGNED(size, AES_BLOCK_SIZE)) 104 return 0; 105 break; 106 case DRV_CIPHER_CBC_CTS: 107 if (size >= AES_BLOCK_SIZE) 108 return 0; 109 break; 110 case DRV_CIPHER_OFB: 111 case DRV_CIPHER_CTR: 112 return 0; 113 case DRV_CIPHER_ECB: 114 case DRV_CIPHER_CBC: 115 case DRV_CIPHER_ESSIV: 116 case DRV_CIPHER_BITLOCKER: 117 if (IS_ALIGNED(size, AES_BLOCK_SIZE)) 118 return 0; 119 break; 120 default: 121 break; 122 } 123 break; 124 case S_DIN_to_DES: 125 if (IS_ALIGNED(size, DES_BLOCK_SIZE)) 126 return 0; 127 break; 128 default: 129 break; 130 } 131 return -EINVAL; 132 } 133 134 static int cc_cipher_init(struct crypto_tfm *tfm) 135 { 136 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 137 struct cc_crypto_alg *cc_alg = 138 container_of(tfm->__crt_alg, struct cc_crypto_alg, 139 skcipher_alg.base); 140 struct device *dev = drvdata_to_dev(cc_alg->drvdata); 141 unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize; 142 int rc = 0; 143 144 dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p, 145 crypto_tfm_alg_name(tfm)); 146 147 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm), 148 sizeof(struct cipher_req_ctx)); 149 150 ctx_p->cipher_mode = cc_alg->cipher_mode; 151 ctx_p->flow_mode = cc_alg->flow_mode; 152 ctx_p->drvdata = cc_alg->drvdata; 153 154 /* Allocate key buffer, cache line aligned */ 155 ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL); 156 if (!ctx_p->user.key) 157 return -ENOMEM; 158 159 dev_dbg(dev, "Allocated key buffer in context. key=@%p\n", 160 ctx_p->user.key); 161 162 /* Map key buffer */ 163 ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key, 164 max_key_buf_size, 165 DMA_TO_DEVICE); 166 if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) { 167 dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n", 168 max_key_buf_size, ctx_p->user.key); 169 return -ENOMEM; 170 } 171 dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n", 172 max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr); 173 174 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { 175 /* Alloc hash tfm for essiv */ 176 ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0); 177 if (IS_ERR(ctx_p->shash_tfm)) { 178 dev_err(dev, "Error allocating hash tfm for ESSIV.\n"); 179 return PTR_ERR(ctx_p->shash_tfm); 180 } 181 } 182 183 return rc; 184 } 185 186 static void cc_cipher_exit(struct crypto_tfm *tfm) 187 { 188 struct crypto_alg *alg = tfm->__crt_alg; 189 struct cc_crypto_alg *cc_alg = 190 container_of(alg, struct cc_crypto_alg, 191 skcipher_alg.base); 192 unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize; 193 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 194 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 195 196 dev_dbg(dev, "Clearing context @%p for %s\n", 197 crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm)); 198 199 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { 200 /* Free hash tfm for essiv */ 201 crypto_free_shash(ctx_p->shash_tfm); 202 ctx_p->shash_tfm = NULL; 203 } 204 205 /* Unmap key buffer */ 206 dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size, 207 DMA_TO_DEVICE); 208 dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n", 209 &ctx_p->user.key_dma_addr); 210 211 /* Free key buffer in context */ 212 kzfree(ctx_p->user.key); 213 dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key); 214 } 215 216 struct tdes_keys { 217 u8 key1[DES_KEY_SIZE]; 218 u8 key2[DES_KEY_SIZE]; 219 u8 key3[DES_KEY_SIZE]; 220 }; 221 222 static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num) 223 { 224 switch (slot_num) { 225 case 0: 226 return KFDE0_KEY; 227 case 1: 228 return KFDE1_KEY; 229 case 2: 230 return KFDE2_KEY; 231 case 3: 232 return KFDE3_KEY; 233 } 234 return END_OF_KEYS; 235 } 236 237 static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key, 238 unsigned int keylen) 239 { 240 struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm); 241 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 242 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 243 struct cc_hkey_info hki; 244 245 dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n", 246 ctx_p, crypto_tfm_alg_name(tfm), keylen); 247 dump_byte_array("key", (u8 *)key, keylen); 248 249 /* STAT_PHASE_0: Init and sanity checks */ 250 251 /* This check the size of the hardware key token */ 252 if (keylen != sizeof(hki)) { 253 dev_err(dev, "Unsupported HW key size %d.\n", keylen); 254 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); 255 return -EINVAL; 256 } 257 258 if (ctx_p->flow_mode != S_DIN_to_AES) { 259 dev_err(dev, "HW key not supported for non-AES flows\n"); 260 return -EINVAL; 261 } 262 263 memcpy(&hki, key, keylen); 264 265 /* The real key len for crypto op is the size of the HW key 266 * referenced by the HW key slot, not the hardware key token 267 */ 268 keylen = hki.keylen; 269 270 if (validate_keys_sizes(ctx_p, keylen)) { 271 dev_err(dev, "Unsupported key size %d.\n", keylen); 272 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); 273 return -EINVAL; 274 } 275 276 ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1); 277 if (ctx_p->hw.key1_slot == END_OF_KEYS) { 278 dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1); 279 return -EINVAL; 280 } 281 282 if (ctx_p->cipher_mode == DRV_CIPHER_XTS || 283 ctx_p->cipher_mode == DRV_CIPHER_ESSIV || 284 ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) { 285 if (hki.hw_key1 == hki.hw_key2) { 286 dev_err(dev, "Illegal hw key numbers (%d,%d)\n", 287 hki.hw_key1, hki.hw_key2); 288 return -EINVAL; 289 } 290 ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2); 291 if (ctx_p->hw.key2_slot == END_OF_KEYS) { 292 dev_err(dev, "Unsupported hw key2 number (%d)\n", 293 hki.hw_key2); 294 return -EINVAL; 295 } 296 } 297 298 ctx_p->keylen = keylen; 299 ctx_p->hw_key = true; 300 dev_dbg(dev, "cc_is_hw_key ret 0"); 301 302 return 0; 303 } 304 305 static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key, 306 unsigned int keylen) 307 { 308 struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm); 309 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 310 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 311 u32 tmp[DES3_EDE_EXPKEY_WORDS]; 312 struct cc_crypto_alg *cc_alg = 313 container_of(tfm->__crt_alg, struct cc_crypto_alg, 314 skcipher_alg.base); 315 unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize; 316 317 dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n", 318 ctx_p, crypto_tfm_alg_name(tfm), keylen); 319 dump_byte_array("key", (u8 *)key, keylen); 320 321 /* STAT_PHASE_0: Init and sanity checks */ 322 323 if (validate_keys_sizes(ctx_p, keylen)) { 324 dev_err(dev, "Unsupported key size %d.\n", keylen); 325 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); 326 return -EINVAL; 327 } 328 329 ctx_p->hw_key = false; 330 331 /* 332 * Verify DES weak keys 333 * Note that we're dropping the expanded key since the 334 * HW does the expansion on its own. 335 */ 336 if (ctx_p->flow_mode == S_DIN_to_DES) { 337 if (keylen == DES3_EDE_KEY_SIZE && 338 __des3_ede_setkey(tmp, &tfm->crt_flags, key, 339 DES3_EDE_KEY_SIZE)) { 340 dev_dbg(dev, "weak 3DES key"); 341 return -EINVAL; 342 } else if (!des_ekey(tmp, key) && 343 (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) { 344 tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY; 345 dev_dbg(dev, "weak DES key"); 346 return -EINVAL; 347 } 348 } 349 350 if (ctx_p->cipher_mode == DRV_CIPHER_XTS && 351 xts_check_key(tfm, key, keylen)) { 352 dev_dbg(dev, "weak XTS key"); 353 return -EINVAL; 354 } 355 356 /* STAT_PHASE_1: Copy key to ctx */ 357 dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr, 358 max_key_buf_size, DMA_TO_DEVICE); 359 360 memcpy(ctx_p->user.key, key, keylen); 361 if (keylen == 24) 362 memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24); 363 364 if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) { 365 /* sha256 for key2 - use sw implementation */ 366 int key_len = keylen >> 1; 367 int err; 368 369 SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm); 370 371 desc->tfm = ctx_p->shash_tfm; 372 373 err = crypto_shash_digest(desc, ctx_p->user.key, key_len, 374 ctx_p->user.key + key_len); 375 if (err) { 376 dev_err(dev, "Failed to hash ESSIV key.\n"); 377 return err; 378 } 379 } 380 dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr, 381 max_key_buf_size, DMA_TO_DEVICE); 382 ctx_p->keylen = keylen; 383 384 dev_dbg(dev, "return safely"); 385 return 0; 386 } 387 388 static void cc_setup_cipher_desc(struct crypto_tfm *tfm, 389 struct cipher_req_ctx *req_ctx, 390 unsigned int ivsize, unsigned int nbytes, 391 struct cc_hw_desc desc[], 392 unsigned int *seq_size) 393 { 394 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 395 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 396 int cipher_mode = ctx_p->cipher_mode; 397 int flow_mode = ctx_p->flow_mode; 398 int direction = req_ctx->gen_ctx.op_type; 399 dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr; 400 unsigned int key_len = ctx_p->keylen; 401 dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr; 402 unsigned int du_size = nbytes; 403 404 struct cc_crypto_alg *cc_alg = 405 container_of(tfm->__crt_alg, struct cc_crypto_alg, 406 skcipher_alg.base); 407 408 if (cc_alg->data_unit) 409 du_size = cc_alg->data_unit; 410 411 switch (cipher_mode) { 412 case DRV_CIPHER_CBC: 413 case DRV_CIPHER_CBC_CTS: 414 case DRV_CIPHER_CTR: 415 case DRV_CIPHER_OFB: 416 /* Load cipher state */ 417 hw_desc_init(&desc[*seq_size]); 418 set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize, 419 NS_BIT); 420 set_cipher_config0(&desc[*seq_size], direction); 421 set_flow_mode(&desc[*seq_size], flow_mode); 422 set_cipher_mode(&desc[*seq_size], cipher_mode); 423 if (cipher_mode == DRV_CIPHER_CTR || 424 cipher_mode == DRV_CIPHER_OFB) { 425 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1); 426 } else { 427 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0); 428 } 429 (*seq_size)++; 430 /*FALLTHROUGH*/ 431 case DRV_CIPHER_ECB: 432 /* Load key */ 433 hw_desc_init(&desc[*seq_size]); 434 set_cipher_mode(&desc[*seq_size], cipher_mode); 435 set_cipher_config0(&desc[*seq_size], direction); 436 if (flow_mode == S_DIN_to_AES) { 437 if (cc_is_hw_key(tfm)) { 438 set_hw_crypto_key(&desc[*seq_size], 439 ctx_p->hw.key1_slot); 440 } else { 441 set_din_type(&desc[*seq_size], DMA_DLLI, 442 key_dma_addr, ((key_len == 24) ? 443 AES_MAX_KEY_SIZE : 444 key_len), NS_BIT); 445 } 446 set_key_size_aes(&desc[*seq_size], key_len); 447 } else { 448 /*des*/ 449 set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr, 450 key_len, NS_BIT); 451 set_key_size_des(&desc[*seq_size], key_len); 452 } 453 set_flow_mode(&desc[*seq_size], flow_mode); 454 set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0); 455 (*seq_size)++; 456 break; 457 case DRV_CIPHER_XTS: 458 case DRV_CIPHER_ESSIV: 459 case DRV_CIPHER_BITLOCKER: 460 /* Load AES key */ 461 hw_desc_init(&desc[*seq_size]); 462 set_cipher_mode(&desc[*seq_size], cipher_mode); 463 set_cipher_config0(&desc[*seq_size], direction); 464 if (cc_is_hw_key(tfm)) { 465 set_hw_crypto_key(&desc[*seq_size], 466 ctx_p->hw.key1_slot); 467 } else { 468 set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr, 469 (key_len / 2), NS_BIT); 470 } 471 set_key_size_aes(&desc[*seq_size], (key_len / 2)); 472 set_flow_mode(&desc[*seq_size], flow_mode); 473 set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0); 474 (*seq_size)++; 475 476 /* load XEX key */ 477 hw_desc_init(&desc[*seq_size]); 478 set_cipher_mode(&desc[*seq_size], cipher_mode); 479 set_cipher_config0(&desc[*seq_size], direction); 480 if (cc_is_hw_key(tfm)) { 481 set_hw_crypto_key(&desc[*seq_size], 482 ctx_p->hw.key2_slot); 483 } else { 484 set_din_type(&desc[*seq_size], DMA_DLLI, 485 (key_dma_addr + (key_len / 2)), 486 (key_len / 2), NS_BIT); 487 } 488 set_xex_data_unit_size(&desc[*seq_size], du_size); 489 set_flow_mode(&desc[*seq_size], S_DIN_to_AES2); 490 set_key_size_aes(&desc[*seq_size], (key_len / 2)); 491 set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY); 492 (*seq_size)++; 493 494 /* Set state */ 495 hw_desc_init(&desc[*seq_size]); 496 set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1); 497 set_cipher_mode(&desc[*seq_size], cipher_mode); 498 set_cipher_config0(&desc[*seq_size], direction); 499 set_key_size_aes(&desc[*seq_size], (key_len / 2)); 500 set_flow_mode(&desc[*seq_size], flow_mode); 501 set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, 502 CC_AES_BLOCK_SIZE, NS_BIT); 503 (*seq_size)++; 504 break; 505 default: 506 dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode); 507 } 508 } 509 510 static void cc_setup_cipher_data(struct crypto_tfm *tfm, 511 struct cipher_req_ctx *req_ctx, 512 struct scatterlist *dst, 513 struct scatterlist *src, unsigned int nbytes, 514 void *areq, struct cc_hw_desc desc[], 515 unsigned int *seq_size) 516 { 517 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 518 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 519 unsigned int flow_mode = ctx_p->flow_mode; 520 521 switch (ctx_p->flow_mode) { 522 case S_DIN_to_AES: 523 flow_mode = DIN_AES_DOUT; 524 break; 525 case S_DIN_to_DES: 526 flow_mode = DIN_DES_DOUT; 527 break; 528 default: 529 dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode); 530 return; 531 } 532 /* Process */ 533 if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) { 534 dev_dbg(dev, " data params addr %pad length 0x%X\n", 535 &sg_dma_address(src), nbytes); 536 dev_dbg(dev, " data params addr %pad length 0x%X\n", 537 &sg_dma_address(dst), nbytes); 538 hw_desc_init(&desc[*seq_size]); 539 set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src), 540 nbytes, NS_BIT); 541 set_dout_dlli(&desc[*seq_size], sg_dma_address(dst), 542 nbytes, NS_BIT, (!areq ? 0 : 1)); 543 if (areq) 544 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); 545 546 set_flow_mode(&desc[*seq_size], flow_mode); 547 (*seq_size)++; 548 } else { 549 /* bypass */ 550 dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n", 551 &req_ctx->mlli_params.mlli_dma_addr, 552 req_ctx->mlli_params.mlli_len, 553 (unsigned int)ctx_p->drvdata->mlli_sram_addr); 554 hw_desc_init(&desc[*seq_size]); 555 set_din_type(&desc[*seq_size], DMA_DLLI, 556 req_ctx->mlli_params.mlli_dma_addr, 557 req_ctx->mlli_params.mlli_len, NS_BIT); 558 set_dout_sram(&desc[*seq_size], 559 ctx_p->drvdata->mlli_sram_addr, 560 req_ctx->mlli_params.mlli_len); 561 set_flow_mode(&desc[*seq_size], BYPASS); 562 (*seq_size)++; 563 564 hw_desc_init(&desc[*seq_size]); 565 set_din_type(&desc[*seq_size], DMA_MLLI, 566 ctx_p->drvdata->mlli_sram_addr, 567 req_ctx->in_mlli_nents, NS_BIT); 568 if (req_ctx->out_nents == 0) { 569 dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n", 570 (unsigned int)ctx_p->drvdata->mlli_sram_addr, 571 (unsigned int)ctx_p->drvdata->mlli_sram_addr); 572 set_dout_mlli(&desc[*seq_size], 573 ctx_p->drvdata->mlli_sram_addr, 574 req_ctx->in_mlli_nents, NS_BIT, 575 (!areq ? 0 : 1)); 576 } else { 577 dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n", 578 (unsigned int)ctx_p->drvdata->mlli_sram_addr, 579 (unsigned int)ctx_p->drvdata->mlli_sram_addr + 580 (u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents); 581 set_dout_mlli(&desc[*seq_size], 582 (ctx_p->drvdata->mlli_sram_addr + 583 (LLI_ENTRY_BYTE_SIZE * 584 req_ctx->in_mlli_nents)), 585 req_ctx->out_mlli_nents, NS_BIT, 586 (!areq ? 0 : 1)); 587 } 588 if (areq) 589 set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]); 590 591 set_flow_mode(&desc[*seq_size], flow_mode); 592 (*seq_size)++; 593 } 594 } 595 596 /* 597 * Update a CTR-AES 128 bit counter 598 */ 599 static void cc_update_ctr(u8 *ctr, unsigned int increment) 600 { 601 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) || 602 IS_ALIGNED((unsigned long)ctr, 8)) { 603 604 __be64 *high_be = (__be64 *)ctr; 605 __be64 *low_be = high_be + 1; 606 u64 orig_low = __be64_to_cpu(*low_be); 607 u64 new_low = orig_low + (u64)increment; 608 609 *low_be = __cpu_to_be64(new_low); 610 611 if (new_low < orig_low) 612 *high_be = __cpu_to_be64(__be64_to_cpu(*high_be) + 1); 613 } else { 614 u8 *pos = (ctr + AES_BLOCK_SIZE); 615 u8 val; 616 unsigned int size; 617 618 for (; increment; increment--) 619 for (size = AES_BLOCK_SIZE; size; size--) { 620 val = *--pos + 1; 621 *pos = val; 622 if (val) 623 break; 624 } 625 } 626 } 627 628 static void cc_cipher_complete(struct device *dev, void *cc_req, int err) 629 { 630 struct skcipher_request *req = (struct skcipher_request *)cc_req; 631 struct scatterlist *dst = req->dst; 632 struct scatterlist *src = req->src; 633 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); 634 struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req); 635 struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm); 636 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 637 unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm); 638 unsigned int len; 639 640 switch (ctx_p->cipher_mode) { 641 case DRV_CIPHER_CBC: 642 /* 643 * The crypto API expects us to set the req->iv to the last 644 * ciphertext block. For encrypt, simply copy from the result. 645 * For decrypt, we must copy from a saved buffer since this 646 * could be an in-place decryption operation and the src is 647 * lost by this point. 648 */ 649 if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) { 650 memcpy(req->iv, req_ctx->backup_info, ivsize); 651 kzfree(req_ctx->backup_info); 652 } else if (!err) { 653 len = req->cryptlen - ivsize; 654 scatterwalk_map_and_copy(req->iv, req->dst, len, 655 ivsize, 0); 656 } 657 break; 658 659 case DRV_CIPHER_CTR: 660 /* Compute the counter of the last block */ 661 len = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE; 662 cc_update_ctr((u8 *)req->iv, len); 663 break; 664 665 default: 666 break; 667 } 668 669 cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); 670 kzfree(req_ctx->iv); 671 672 skcipher_request_complete(req, err); 673 } 674 675 static int cc_cipher_process(struct skcipher_request *req, 676 enum drv_crypto_direction direction) 677 { 678 struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req); 679 struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm); 680 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); 681 unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm); 682 struct scatterlist *dst = req->dst; 683 struct scatterlist *src = req->src; 684 unsigned int nbytes = req->cryptlen; 685 void *iv = req->iv; 686 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 687 struct device *dev = drvdata_to_dev(ctx_p->drvdata); 688 struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN]; 689 struct cc_crypto_req cc_req = {}; 690 int rc; 691 unsigned int seq_len = 0; 692 gfp_t flags = cc_gfp_flags(&req->base); 693 694 dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n", 695 ((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ? 696 "Encrypt" : "Decrypt"), req, iv, nbytes); 697 698 /* STAT_PHASE_0: Init and sanity checks */ 699 700 /* TODO: check data length according to mode */ 701 if (validate_data_size(ctx_p, nbytes)) { 702 dev_err(dev, "Unsupported data size %d.\n", nbytes); 703 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN); 704 rc = -EINVAL; 705 goto exit_process; 706 } 707 if (nbytes == 0) { 708 /* No data to process is valid */ 709 rc = 0; 710 goto exit_process; 711 } 712 713 /* The IV we are handed may be allocted from the stack so 714 * we must copy it to a DMAable buffer before use. 715 */ 716 req_ctx->iv = kmemdup(iv, ivsize, flags); 717 if (!req_ctx->iv) { 718 rc = -ENOMEM; 719 goto exit_process; 720 } 721 722 /* Setup request structure */ 723 cc_req.user_cb = (void *)cc_cipher_complete; 724 cc_req.user_arg = (void *)req; 725 726 /* Setup request context */ 727 req_ctx->gen_ctx.op_type = direction; 728 729 /* STAT_PHASE_1: Map buffers */ 730 731 rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, 732 req_ctx->iv, src, dst, flags); 733 if (rc) { 734 dev_err(dev, "map_request() failed\n"); 735 goto exit_process; 736 } 737 738 /* STAT_PHASE_2: Create sequence */ 739 740 /* Setup processing */ 741 cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len); 742 /* Data processing */ 743 cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc, 744 &seq_len); 745 746 /* do we need to generate IV? */ 747 if (req_ctx->is_giv) { 748 cc_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr; 749 cc_req.ivgen_dma_addr_len = 1; 750 /* set the IV size (8/16 B long)*/ 751 cc_req.ivgen_size = ivsize; 752 } 753 754 /* STAT_PHASE_3: Lock HW and push sequence */ 755 756 rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len, 757 &req->base); 758 if (rc != -EINPROGRESS && rc != -EBUSY) { 759 /* Failed to send the request or request completed 760 * synchronously 761 */ 762 cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); 763 } 764 765 exit_process: 766 if (rc != -EINPROGRESS && rc != -EBUSY) { 767 kzfree(req_ctx->backup_info); 768 kzfree(req_ctx->iv); 769 } 770 771 return rc; 772 } 773 774 static int cc_cipher_encrypt(struct skcipher_request *req) 775 { 776 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); 777 778 req_ctx->is_giv = false; 779 req_ctx->backup_info = NULL; 780 781 return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT); 782 } 783 784 static int cc_cipher_decrypt(struct skcipher_request *req) 785 { 786 struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req); 787 struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm); 788 struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm); 789 struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req); 790 unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm); 791 gfp_t flags = cc_gfp_flags(&req->base); 792 unsigned int len; 793 794 if (ctx_p->cipher_mode == DRV_CIPHER_CBC) { 795 796 /* Allocate and save the last IV sized bytes of the source, 797 * which will be lost in case of in-place decryption. 798 */ 799 req_ctx->backup_info = kzalloc(ivsize, flags); 800 if (!req_ctx->backup_info) 801 return -ENOMEM; 802 803 len = req->cryptlen - ivsize; 804 scatterwalk_map_and_copy(req_ctx->backup_info, req->src, len, 805 ivsize, 0); 806 } else { 807 req_ctx->backup_info = NULL; 808 } 809 810 req_ctx->is_giv = false; 811 812 return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT); 813 } 814 815 /* Block cipher alg */ 816 static const struct cc_alg_template skcipher_algs[] = { 817 { 818 .name = "xts(paes)", 819 .driver_name = "xts-paes-ccree", 820 .blocksize = AES_BLOCK_SIZE, 821 .template_skcipher = { 822 .setkey = cc_cipher_sethkey, 823 .encrypt = cc_cipher_encrypt, 824 .decrypt = cc_cipher_decrypt, 825 .min_keysize = CC_HW_KEY_SIZE, 826 .max_keysize = CC_HW_KEY_SIZE, 827 .ivsize = AES_BLOCK_SIZE, 828 }, 829 .cipher_mode = DRV_CIPHER_XTS, 830 .flow_mode = S_DIN_to_AES, 831 .min_hw_rev = CC_HW_REV_630, 832 }, 833 { 834 .name = "xts512(paes)", 835 .driver_name = "xts-paes-du512-ccree", 836 .blocksize = AES_BLOCK_SIZE, 837 .template_skcipher = { 838 .setkey = cc_cipher_sethkey, 839 .encrypt = cc_cipher_encrypt, 840 .decrypt = cc_cipher_decrypt, 841 .min_keysize = CC_HW_KEY_SIZE, 842 .max_keysize = CC_HW_KEY_SIZE, 843 .ivsize = AES_BLOCK_SIZE, 844 }, 845 .cipher_mode = DRV_CIPHER_XTS, 846 .flow_mode = S_DIN_to_AES, 847 .data_unit = 512, 848 .min_hw_rev = CC_HW_REV_712, 849 }, 850 { 851 .name = "xts4096(paes)", 852 .driver_name = "xts-paes-du4096-ccree", 853 .blocksize = AES_BLOCK_SIZE, 854 .template_skcipher = { 855 .setkey = cc_cipher_sethkey, 856 .encrypt = cc_cipher_encrypt, 857 .decrypt = cc_cipher_decrypt, 858 .min_keysize = CC_HW_KEY_SIZE, 859 .max_keysize = CC_HW_KEY_SIZE, 860 .ivsize = AES_BLOCK_SIZE, 861 }, 862 .cipher_mode = DRV_CIPHER_XTS, 863 .flow_mode = S_DIN_to_AES, 864 .data_unit = 4096, 865 .min_hw_rev = CC_HW_REV_712, 866 }, 867 { 868 .name = "essiv(paes)", 869 .driver_name = "essiv-paes-ccree", 870 .blocksize = AES_BLOCK_SIZE, 871 .template_skcipher = { 872 .setkey = cc_cipher_sethkey, 873 .encrypt = cc_cipher_encrypt, 874 .decrypt = cc_cipher_decrypt, 875 .min_keysize = CC_HW_KEY_SIZE, 876 .max_keysize = CC_HW_KEY_SIZE, 877 .ivsize = AES_BLOCK_SIZE, 878 }, 879 .cipher_mode = DRV_CIPHER_ESSIV, 880 .flow_mode = S_DIN_to_AES, 881 .min_hw_rev = CC_HW_REV_712, 882 }, 883 { 884 .name = "essiv512(paes)", 885 .driver_name = "essiv-paes-du512-ccree", 886 .blocksize = AES_BLOCK_SIZE, 887 .template_skcipher = { 888 .setkey = cc_cipher_sethkey, 889 .encrypt = cc_cipher_encrypt, 890 .decrypt = cc_cipher_decrypt, 891 .min_keysize = CC_HW_KEY_SIZE, 892 .max_keysize = CC_HW_KEY_SIZE, 893 .ivsize = AES_BLOCK_SIZE, 894 }, 895 .cipher_mode = DRV_CIPHER_ESSIV, 896 .flow_mode = S_DIN_to_AES, 897 .data_unit = 512, 898 .min_hw_rev = CC_HW_REV_712, 899 }, 900 { 901 .name = "essiv4096(paes)", 902 .driver_name = "essiv-paes-du4096-ccree", 903 .blocksize = AES_BLOCK_SIZE, 904 .template_skcipher = { 905 .setkey = cc_cipher_sethkey, 906 .encrypt = cc_cipher_encrypt, 907 .decrypt = cc_cipher_decrypt, 908 .min_keysize = CC_HW_KEY_SIZE, 909 .max_keysize = CC_HW_KEY_SIZE, 910 .ivsize = AES_BLOCK_SIZE, 911 }, 912 .cipher_mode = DRV_CIPHER_ESSIV, 913 .flow_mode = S_DIN_to_AES, 914 .data_unit = 4096, 915 .min_hw_rev = CC_HW_REV_712, 916 }, 917 { 918 .name = "bitlocker(paes)", 919 .driver_name = "bitlocker-paes-ccree", 920 .blocksize = AES_BLOCK_SIZE, 921 .template_skcipher = { 922 .setkey = cc_cipher_sethkey, 923 .encrypt = cc_cipher_encrypt, 924 .decrypt = cc_cipher_decrypt, 925 .min_keysize = CC_HW_KEY_SIZE, 926 .max_keysize = CC_HW_KEY_SIZE, 927 .ivsize = AES_BLOCK_SIZE, 928 }, 929 .cipher_mode = DRV_CIPHER_BITLOCKER, 930 .flow_mode = S_DIN_to_AES, 931 .min_hw_rev = CC_HW_REV_712, 932 }, 933 { 934 .name = "bitlocker512(paes)", 935 .driver_name = "bitlocker-paes-du512-ccree", 936 .blocksize = AES_BLOCK_SIZE, 937 .template_skcipher = { 938 .setkey = cc_cipher_sethkey, 939 .encrypt = cc_cipher_encrypt, 940 .decrypt = cc_cipher_decrypt, 941 .min_keysize = CC_HW_KEY_SIZE, 942 .max_keysize = CC_HW_KEY_SIZE, 943 .ivsize = AES_BLOCK_SIZE, 944 }, 945 .cipher_mode = DRV_CIPHER_BITLOCKER, 946 .flow_mode = S_DIN_to_AES, 947 .data_unit = 512, 948 .min_hw_rev = CC_HW_REV_712, 949 }, 950 { 951 .name = "bitlocker4096(paes)", 952 .driver_name = "bitlocker-paes-du4096-ccree", 953 .blocksize = AES_BLOCK_SIZE, 954 .template_skcipher = { 955 .setkey = cc_cipher_sethkey, 956 .encrypt = cc_cipher_encrypt, 957 .decrypt = cc_cipher_decrypt, 958 .min_keysize = CC_HW_KEY_SIZE, 959 .max_keysize = CC_HW_KEY_SIZE, 960 .ivsize = AES_BLOCK_SIZE, 961 }, 962 .cipher_mode = DRV_CIPHER_BITLOCKER, 963 .flow_mode = S_DIN_to_AES, 964 .data_unit = 4096, 965 .min_hw_rev = CC_HW_REV_712, 966 }, 967 { 968 .name = "ecb(paes)", 969 .driver_name = "ecb-paes-ccree", 970 .blocksize = AES_BLOCK_SIZE, 971 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 972 .template_skcipher = { 973 .setkey = cc_cipher_sethkey, 974 .encrypt = cc_cipher_encrypt, 975 .decrypt = cc_cipher_decrypt, 976 .min_keysize = CC_HW_KEY_SIZE, 977 .max_keysize = CC_HW_KEY_SIZE, 978 .ivsize = 0, 979 }, 980 .cipher_mode = DRV_CIPHER_ECB, 981 .flow_mode = S_DIN_to_AES, 982 .min_hw_rev = CC_HW_REV_712, 983 }, 984 { 985 .name = "cbc(paes)", 986 .driver_name = "cbc-paes-ccree", 987 .blocksize = AES_BLOCK_SIZE, 988 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 989 .template_skcipher = { 990 .setkey = cc_cipher_sethkey, 991 .encrypt = cc_cipher_encrypt, 992 .decrypt = cc_cipher_decrypt, 993 .min_keysize = CC_HW_KEY_SIZE, 994 .max_keysize = CC_HW_KEY_SIZE, 995 .ivsize = AES_BLOCK_SIZE, 996 }, 997 .cipher_mode = DRV_CIPHER_CBC, 998 .flow_mode = S_DIN_to_AES, 999 .min_hw_rev = CC_HW_REV_712, 1000 }, 1001 { 1002 .name = "ofb(paes)", 1003 .driver_name = "ofb-paes-ccree", 1004 .blocksize = AES_BLOCK_SIZE, 1005 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1006 .template_skcipher = { 1007 .setkey = cc_cipher_sethkey, 1008 .encrypt = cc_cipher_encrypt, 1009 .decrypt = cc_cipher_decrypt, 1010 .min_keysize = CC_HW_KEY_SIZE, 1011 .max_keysize = CC_HW_KEY_SIZE, 1012 .ivsize = AES_BLOCK_SIZE, 1013 }, 1014 .cipher_mode = DRV_CIPHER_OFB, 1015 .flow_mode = S_DIN_to_AES, 1016 .min_hw_rev = CC_HW_REV_712, 1017 }, 1018 { 1019 .name = "cts(cbc(paes))", 1020 .driver_name = "cts-cbc-paes-ccree", 1021 .blocksize = AES_BLOCK_SIZE, 1022 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1023 .template_skcipher = { 1024 .setkey = cc_cipher_sethkey, 1025 .encrypt = cc_cipher_encrypt, 1026 .decrypt = cc_cipher_decrypt, 1027 .min_keysize = CC_HW_KEY_SIZE, 1028 .max_keysize = CC_HW_KEY_SIZE, 1029 .ivsize = AES_BLOCK_SIZE, 1030 }, 1031 .cipher_mode = DRV_CIPHER_CBC_CTS, 1032 .flow_mode = S_DIN_to_AES, 1033 .min_hw_rev = CC_HW_REV_712, 1034 }, 1035 { 1036 .name = "ctr(paes)", 1037 .driver_name = "ctr-paes-ccree", 1038 .blocksize = 1, 1039 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1040 .template_skcipher = { 1041 .setkey = cc_cipher_sethkey, 1042 .encrypt = cc_cipher_encrypt, 1043 .decrypt = cc_cipher_decrypt, 1044 .min_keysize = CC_HW_KEY_SIZE, 1045 .max_keysize = CC_HW_KEY_SIZE, 1046 .ivsize = AES_BLOCK_SIZE, 1047 }, 1048 .cipher_mode = DRV_CIPHER_CTR, 1049 .flow_mode = S_DIN_to_AES, 1050 .min_hw_rev = CC_HW_REV_712, 1051 }, 1052 { 1053 .name = "xts(aes)", 1054 .driver_name = "xts-aes-ccree", 1055 .blocksize = AES_BLOCK_SIZE, 1056 .template_skcipher = { 1057 .setkey = cc_cipher_setkey, 1058 .encrypt = cc_cipher_encrypt, 1059 .decrypt = cc_cipher_decrypt, 1060 .min_keysize = AES_MIN_KEY_SIZE * 2, 1061 .max_keysize = AES_MAX_KEY_SIZE * 2, 1062 .ivsize = AES_BLOCK_SIZE, 1063 }, 1064 .cipher_mode = DRV_CIPHER_XTS, 1065 .flow_mode = S_DIN_to_AES, 1066 .min_hw_rev = CC_HW_REV_630, 1067 }, 1068 { 1069 .name = "xts512(aes)", 1070 .driver_name = "xts-aes-du512-ccree", 1071 .blocksize = AES_BLOCK_SIZE, 1072 .template_skcipher = { 1073 .setkey = cc_cipher_setkey, 1074 .encrypt = cc_cipher_encrypt, 1075 .decrypt = cc_cipher_decrypt, 1076 .min_keysize = AES_MIN_KEY_SIZE * 2, 1077 .max_keysize = AES_MAX_KEY_SIZE * 2, 1078 .ivsize = AES_BLOCK_SIZE, 1079 }, 1080 .cipher_mode = DRV_CIPHER_XTS, 1081 .flow_mode = S_DIN_to_AES, 1082 .data_unit = 512, 1083 .min_hw_rev = CC_HW_REV_712, 1084 }, 1085 { 1086 .name = "xts4096(aes)", 1087 .driver_name = "xts-aes-du4096-ccree", 1088 .blocksize = AES_BLOCK_SIZE, 1089 .template_skcipher = { 1090 .setkey = cc_cipher_setkey, 1091 .encrypt = cc_cipher_encrypt, 1092 .decrypt = cc_cipher_decrypt, 1093 .min_keysize = AES_MIN_KEY_SIZE * 2, 1094 .max_keysize = AES_MAX_KEY_SIZE * 2, 1095 .ivsize = AES_BLOCK_SIZE, 1096 }, 1097 .cipher_mode = DRV_CIPHER_XTS, 1098 .flow_mode = S_DIN_to_AES, 1099 .data_unit = 4096, 1100 .min_hw_rev = CC_HW_REV_712, 1101 }, 1102 { 1103 .name = "essiv(aes)", 1104 .driver_name = "essiv-aes-ccree", 1105 .blocksize = AES_BLOCK_SIZE, 1106 .template_skcipher = { 1107 .setkey = cc_cipher_setkey, 1108 .encrypt = cc_cipher_encrypt, 1109 .decrypt = cc_cipher_decrypt, 1110 .min_keysize = AES_MIN_KEY_SIZE * 2, 1111 .max_keysize = AES_MAX_KEY_SIZE * 2, 1112 .ivsize = AES_BLOCK_SIZE, 1113 }, 1114 .cipher_mode = DRV_CIPHER_ESSIV, 1115 .flow_mode = S_DIN_to_AES, 1116 .min_hw_rev = CC_HW_REV_712, 1117 }, 1118 { 1119 .name = "essiv512(aes)", 1120 .driver_name = "essiv-aes-du512-ccree", 1121 .blocksize = AES_BLOCK_SIZE, 1122 .template_skcipher = { 1123 .setkey = cc_cipher_setkey, 1124 .encrypt = cc_cipher_encrypt, 1125 .decrypt = cc_cipher_decrypt, 1126 .min_keysize = AES_MIN_KEY_SIZE * 2, 1127 .max_keysize = AES_MAX_KEY_SIZE * 2, 1128 .ivsize = AES_BLOCK_SIZE, 1129 }, 1130 .cipher_mode = DRV_CIPHER_ESSIV, 1131 .flow_mode = S_DIN_to_AES, 1132 .data_unit = 512, 1133 .min_hw_rev = CC_HW_REV_712, 1134 }, 1135 { 1136 .name = "essiv4096(aes)", 1137 .driver_name = "essiv-aes-du4096-ccree", 1138 .blocksize = AES_BLOCK_SIZE, 1139 .template_skcipher = { 1140 .setkey = cc_cipher_setkey, 1141 .encrypt = cc_cipher_encrypt, 1142 .decrypt = cc_cipher_decrypt, 1143 .min_keysize = AES_MIN_KEY_SIZE * 2, 1144 .max_keysize = AES_MAX_KEY_SIZE * 2, 1145 .ivsize = AES_BLOCK_SIZE, 1146 }, 1147 .cipher_mode = DRV_CIPHER_ESSIV, 1148 .flow_mode = S_DIN_to_AES, 1149 .data_unit = 4096, 1150 .min_hw_rev = CC_HW_REV_712, 1151 }, 1152 { 1153 .name = "bitlocker(aes)", 1154 .driver_name = "bitlocker-aes-ccree", 1155 .blocksize = AES_BLOCK_SIZE, 1156 .template_skcipher = { 1157 .setkey = cc_cipher_setkey, 1158 .encrypt = cc_cipher_encrypt, 1159 .decrypt = cc_cipher_decrypt, 1160 .min_keysize = AES_MIN_KEY_SIZE * 2, 1161 .max_keysize = AES_MAX_KEY_SIZE * 2, 1162 .ivsize = AES_BLOCK_SIZE, 1163 }, 1164 .cipher_mode = DRV_CIPHER_BITLOCKER, 1165 .flow_mode = S_DIN_to_AES, 1166 .min_hw_rev = CC_HW_REV_712, 1167 }, 1168 { 1169 .name = "bitlocker512(aes)", 1170 .driver_name = "bitlocker-aes-du512-ccree", 1171 .blocksize = AES_BLOCK_SIZE, 1172 .template_skcipher = { 1173 .setkey = cc_cipher_setkey, 1174 .encrypt = cc_cipher_encrypt, 1175 .decrypt = cc_cipher_decrypt, 1176 .min_keysize = AES_MIN_KEY_SIZE * 2, 1177 .max_keysize = AES_MAX_KEY_SIZE * 2, 1178 .ivsize = AES_BLOCK_SIZE, 1179 }, 1180 .cipher_mode = DRV_CIPHER_BITLOCKER, 1181 .flow_mode = S_DIN_to_AES, 1182 .data_unit = 512, 1183 .min_hw_rev = CC_HW_REV_712, 1184 }, 1185 { 1186 .name = "bitlocker4096(aes)", 1187 .driver_name = "bitlocker-aes-du4096-ccree", 1188 .blocksize = AES_BLOCK_SIZE, 1189 .template_skcipher = { 1190 .setkey = cc_cipher_setkey, 1191 .encrypt = cc_cipher_encrypt, 1192 .decrypt = cc_cipher_decrypt, 1193 .min_keysize = AES_MIN_KEY_SIZE * 2, 1194 .max_keysize = AES_MAX_KEY_SIZE * 2, 1195 .ivsize = AES_BLOCK_SIZE, 1196 }, 1197 .cipher_mode = DRV_CIPHER_BITLOCKER, 1198 .flow_mode = S_DIN_to_AES, 1199 .data_unit = 4096, 1200 .min_hw_rev = CC_HW_REV_712, 1201 }, 1202 { 1203 .name = "ecb(aes)", 1204 .driver_name = "ecb-aes-ccree", 1205 .blocksize = AES_BLOCK_SIZE, 1206 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1207 .template_skcipher = { 1208 .setkey = cc_cipher_setkey, 1209 .encrypt = cc_cipher_encrypt, 1210 .decrypt = cc_cipher_decrypt, 1211 .min_keysize = AES_MIN_KEY_SIZE, 1212 .max_keysize = AES_MAX_KEY_SIZE, 1213 .ivsize = 0, 1214 }, 1215 .cipher_mode = DRV_CIPHER_ECB, 1216 .flow_mode = S_DIN_to_AES, 1217 .min_hw_rev = CC_HW_REV_630, 1218 }, 1219 { 1220 .name = "cbc(aes)", 1221 .driver_name = "cbc-aes-ccree", 1222 .blocksize = AES_BLOCK_SIZE, 1223 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1224 .template_skcipher = { 1225 .setkey = cc_cipher_setkey, 1226 .encrypt = cc_cipher_encrypt, 1227 .decrypt = cc_cipher_decrypt, 1228 .min_keysize = AES_MIN_KEY_SIZE, 1229 .max_keysize = AES_MAX_KEY_SIZE, 1230 .ivsize = AES_BLOCK_SIZE, 1231 }, 1232 .cipher_mode = DRV_CIPHER_CBC, 1233 .flow_mode = S_DIN_to_AES, 1234 .min_hw_rev = CC_HW_REV_630, 1235 }, 1236 { 1237 .name = "ofb(aes)", 1238 .driver_name = "ofb-aes-ccree", 1239 .blocksize = AES_BLOCK_SIZE, 1240 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1241 .template_skcipher = { 1242 .setkey = cc_cipher_setkey, 1243 .encrypt = cc_cipher_encrypt, 1244 .decrypt = cc_cipher_decrypt, 1245 .min_keysize = AES_MIN_KEY_SIZE, 1246 .max_keysize = AES_MAX_KEY_SIZE, 1247 .ivsize = AES_BLOCK_SIZE, 1248 }, 1249 .cipher_mode = DRV_CIPHER_OFB, 1250 .flow_mode = S_DIN_to_AES, 1251 .min_hw_rev = CC_HW_REV_630, 1252 }, 1253 { 1254 .name = "cts(cbc(aes))", 1255 .driver_name = "cts-cbc-aes-ccree", 1256 .blocksize = AES_BLOCK_SIZE, 1257 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1258 .template_skcipher = { 1259 .setkey = cc_cipher_setkey, 1260 .encrypt = cc_cipher_encrypt, 1261 .decrypt = cc_cipher_decrypt, 1262 .min_keysize = AES_MIN_KEY_SIZE, 1263 .max_keysize = AES_MAX_KEY_SIZE, 1264 .ivsize = AES_BLOCK_SIZE, 1265 }, 1266 .cipher_mode = DRV_CIPHER_CBC_CTS, 1267 .flow_mode = S_DIN_to_AES, 1268 .min_hw_rev = CC_HW_REV_630, 1269 }, 1270 { 1271 .name = "ctr(aes)", 1272 .driver_name = "ctr-aes-ccree", 1273 .blocksize = 1, 1274 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1275 .template_skcipher = { 1276 .setkey = cc_cipher_setkey, 1277 .encrypt = cc_cipher_encrypt, 1278 .decrypt = cc_cipher_decrypt, 1279 .min_keysize = AES_MIN_KEY_SIZE, 1280 .max_keysize = AES_MAX_KEY_SIZE, 1281 .ivsize = AES_BLOCK_SIZE, 1282 }, 1283 .cipher_mode = DRV_CIPHER_CTR, 1284 .flow_mode = S_DIN_to_AES, 1285 .min_hw_rev = CC_HW_REV_630, 1286 }, 1287 { 1288 .name = "cbc(des3_ede)", 1289 .driver_name = "cbc-3des-ccree", 1290 .blocksize = DES3_EDE_BLOCK_SIZE, 1291 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1292 .template_skcipher = { 1293 .setkey = cc_cipher_setkey, 1294 .encrypt = cc_cipher_encrypt, 1295 .decrypt = cc_cipher_decrypt, 1296 .min_keysize = DES3_EDE_KEY_SIZE, 1297 .max_keysize = DES3_EDE_KEY_SIZE, 1298 .ivsize = DES3_EDE_BLOCK_SIZE, 1299 }, 1300 .cipher_mode = DRV_CIPHER_CBC, 1301 .flow_mode = S_DIN_to_DES, 1302 .min_hw_rev = CC_HW_REV_630, 1303 }, 1304 { 1305 .name = "ecb(des3_ede)", 1306 .driver_name = "ecb-3des-ccree", 1307 .blocksize = DES3_EDE_BLOCK_SIZE, 1308 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1309 .template_skcipher = { 1310 .setkey = cc_cipher_setkey, 1311 .encrypt = cc_cipher_encrypt, 1312 .decrypt = cc_cipher_decrypt, 1313 .min_keysize = DES3_EDE_KEY_SIZE, 1314 .max_keysize = DES3_EDE_KEY_SIZE, 1315 .ivsize = 0, 1316 }, 1317 .cipher_mode = DRV_CIPHER_ECB, 1318 .flow_mode = S_DIN_to_DES, 1319 .min_hw_rev = CC_HW_REV_630, 1320 }, 1321 { 1322 .name = "cbc(des)", 1323 .driver_name = "cbc-des-ccree", 1324 .blocksize = DES_BLOCK_SIZE, 1325 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1326 .template_skcipher = { 1327 .setkey = cc_cipher_setkey, 1328 .encrypt = cc_cipher_encrypt, 1329 .decrypt = cc_cipher_decrypt, 1330 .min_keysize = DES_KEY_SIZE, 1331 .max_keysize = DES_KEY_SIZE, 1332 .ivsize = DES_BLOCK_SIZE, 1333 }, 1334 .cipher_mode = DRV_CIPHER_CBC, 1335 .flow_mode = S_DIN_to_DES, 1336 .min_hw_rev = CC_HW_REV_630, 1337 }, 1338 { 1339 .name = "ecb(des)", 1340 .driver_name = "ecb-des-ccree", 1341 .blocksize = DES_BLOCK_SIZE, 1342 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1343 .template_skcipher = { 1344 .setkey = cc_cipher_setkey, 1345 .encrypt = cc_cipher_encrypt, 1346 .decrypt = cc_cipher_decrypt, 1347 .min_keysize = DES_KEY_SIZE, 1348 .max_keysize = DES_KEY_SIZE, 1349 .ivsize = 0, 1350 }, 1351 .cipher_mode = DRV_CIPHER_ECB, 1352 .flow_mode = S_DIN_to_DES, 1353 .min_hw_rev = CC_HW_REV_630, 1354 }, 1355 }; 1356 1357 static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl, 1358 struct device *dev) 1359 { 1360 struct cc_crypto_alg *t_alg; 1361 struct skcipher_alg *alg; 1362 1363 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL); 1364 if (!t_alg) 1365 return ERR_PTR(-ENOMEM); 1366 1367 alg = &t_alg->skcipher_alg; 1368 1369 memcpy(alg, &tmpl->template_skcipher, sizeof(*alg)); 1370 1371 snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name); 1372 snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s", 1373 tmpl->driver_name); 1374 alg->base.cra_module = THIS_MODULE; 1375 alg->base.cra_priority = CC_CRA_PRIO; 1376 alg->base.cra_blocksize = tmpl->blocksize; 1377 alg->base.cra_alignmask = 0; 1378 alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx); 1379 1380 alg->base.cra_init = cc_cipher_init; 1381 alg->base.cra_exit = cc_cipher_exit; 1382 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 1383 1384 t_alg->cipher_mode = tmpl->cipher_mode; 1385 t_alg->flow_mode = tmpl->flow_mode; 1386 t_alg->data_unit = tmpl->data_unit; 1387 1388 return t_alg; 1389 } 1390 1391 int cc_cipher_free(struct cc_drvdata *drvdata) 1392 { 1393 struct cc_crypto_alg *t_alg, *n; 1394 struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle; 1395 1396 if (cipher_handle) { 1397 /* Remove registered algs */ 1398 list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list, 1399 entry) { 1400 crypto_unregister_skcipher(&t_alg->skcipher_alg); 1401 list_del(&t_alg->entry); 1402 kfree(t_alg); 1403 } 1404 kfree(cipher_handle); 1405 drvdata->cipher_handle = NULL; 1406 } 1407 return 0; 1408 } 1409 1410 int cc_cipher_alloc(struct cc_drvdata *drvdata) 1411 { 1412 struct cc_cipher_handle *cipher_handle; 1413 struct cc_crypto_alg *t_alg; 1414 struct device *dev = drvdata_to_dev(drvdata); 1415 int rc = -ENOMEM; 1416 int alg; 1417 1418 cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL); 1419 if (!cipher_handle) 1420 return -ENOMEM; 1421 1422 INIT_LIST_HEAD(&cipher_handle->alg_list); 1423 drvdata->cipher_handle = cipher_handle; 1424 1425 /* Linux crypto */ 1426 dev_dbg(dev, "Number of algorithms = %zu\n", 1427 ARRAY_SIZE(skcipher_algs)); 1428 for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) { 1429 if (skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) 1430 continue; 1431 1432 dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name); 1433 t_alg = cc_create_alg(&skcipher_algs[alg], dev); 1434 if (IS_ERR(t_alg)) { 1435 rc = PTR_ERR(t_alg); 1436 dev_err(dev, "%s alg allocation failed\n", 1437 skcipher_algs[alg].driver_name); 1438 goto fail0; 1439 } 1440 t_alg->drvdata = drvdata; 1441 1442 dev_dbg(dev, "registering %s\n", 1443 skcipher_algs[alg].driver_name); 1444 rc = crypto_register_skcipher(&t_alg->skcipher_alg); 1445 dev_dbg(dev, "%s alg registration rc = %x\n", 1446 t_alg->skcipher_alg.base.cra_driver_name, rc); 1447 if (rc) { 1448 dev_err(dev, "%s alg registration failed\n", 1449 t_alg->skcipher_alg.base.cra_driver_name); 1450 kfree(t_alg); 1451 goto fail0; 1452 } else { 1453 list_add_tail(&t_alg->entry, 1454 &cipher_handle->alg_list); 1455 dev_dbg(dev, "Registered %s\n", 1456 t_alg->skcipher_alg.base.cra_driver_name); 1457 } 1458 } 1459 return 0; 1460 1461 fail0: 1462 cc_cipher_free(drvdata); 1463 return rc; 1464 } 1465