1 /* 2 * caam - Freescale FSL CAAM support for crypto API 3 * 4 * Copyright 2008-2011 Freescale Semiconductor, Inc. 5 * Copyright 2016 NXP 6 * 7 * Based on talitos crypto API driver. 8 * 9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008): 10 * 11 * --------------- --------------- 12 * | JobDesc #1 |-------------------->| ShareDesc | 13 * | *(packet 1) | | (PDB) | 14 * --------------- |------------->| (hashKey) | 15 * . | | (cipherKey) | 16 * . | |-------->| (operation) | 17 * --------------- | | --------------- 18 * | JobDesc #2 |------| | 19 * | *(packet 2) | | 20 * --------------- | 21 * . | 22 * . | 23 * --------------- | 24 * | JobDesc #3 |------------ 25 * | *(packet 3) | 26 * --------------- 27 * 28 * The SharedDesc never changes for a connection unless rekeyed, but 29 * each packet will likely be in a different place. So all we need 30 * to know to process the packet is where the input is, where the 31 * output goes, and what context we want to process with. Context is 32 * in the SharedDesc, packet references in the JobDesc. 33 * 34 * So, a job desc looks like: 35 * 36 * --------------------- 37 * | Header | 38 * | ShareDesc Pointer | 39 * | SEQ_OUT_PTR | 40 * | (output buffer) | 41 * | (output length) | 42 * | SEQ_IN_PTR | 43 * | (input buffer) | 44 * | (input length) | 45 * --------------------- 46 */ 47 48 #include "compat.h" 49 50 #include "regs.h" 51 #include "intern.h" 52 #include "desc_constr.h" 53 #include "jr.h" 54 #include "error.h" 55 #include "sg_sw_sec4.h" 56 #include "key_gen.h" 57 #include "caamalg_desc.h" 58 59 /* 60 * crypto alg 61 */ 62 #define CAAM_CRA_PRIORITY 3000 63 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */ 64 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \ 65 CTR_RFC3686_NONCE_SIZE + \ 66 SHA512_DIGEST_SIZE * 2) 67 68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2) 69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 70 CAAM_CMD_SZ * 4) 71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 72 CAAM_CMD_SZ * 5) 73 74 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN) 75 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ) 76 77 #ifdef DEBUG 78 /* for print_hex_dumps with line references */ 79 #define debug(format, arg...) printk(format, arg) 80 #else 81 #define debug(format, arg...) 82 #endif 83 84 static struct list_head alg_list; 85 86 struct caam_alg_entry { 87 int class1_alg_type; 88 int class2_alg_type; 89 bool rfc3686; 90 bool geniv; 91 }; 92 93 struct caam_aead_alg { 94 struct aead_alg aead; 95 struct caam_alg_entry caam; 96 bool registered; 97 }; 98 99 /* 100 * per-session context 101 */ 102 struct caam_ctx { 103 u32 sh_desc_enc[DESC_MAX_USED_LEN]; 104 u32 sh_desc_dec[DESC_MAX_USED_LEN]; 105 u32 sh_desc_givenc[DESC_MAX_USED_LEN]; 106 u8 key[CAAM_MAX_KEY_SIZE]; 107 dma_addr_t sh_desc_enc_dma; 108 dma_addr_t sh_desc_dec_dma; 109 dma_addr_t sh_desc_givenc_dma; 110 dma_addr_t key_dma; 111 struct device *jrdev; 112 struct alginfo adata; 113 struct alginfo cdata; 114 unsigned int authsize; 115 }; 116 117 static int aead_null_set_sh_desc(struct crypto_aead *aead) 118 { 119 struct caam_ctx *ctx = crypto_aead_ctx(aead); 120 struct device *jrdev = ctx->jrdev; 121 u32 *desc; 122 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN - 123 ctx->adata.keylen_pad; 124 125 /* 126 * Job Descriptor and Shared Descriptors 127 * must all fit into the 64-word Descriptor h/w Buffer 128 */ 129 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) { 130 ctx->adata.key_inline = true; 131 ctx->adata.key_virt = ctx->key; 132 } else { 133 ctx->adata.key_inline = false; 134 ctx->adata.key_dma = ctx->key_dma; 135 } 136 137 /* aead_encrypt shared descriptor */ 138 desc = ctx->sh_desc_enc; 139 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize); 140 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 141 desc_bytes(desc), DMA_TO_DEVICE); 142 143 /* 144 * Job Descriptor and Shared Descriptors 145 * must all fit into the 64-word Descriptor h/w Buffer 146 */ 147 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) { 148 ctx->adata.key_inline = true; 149 ctx->adata.key_virt = ctx->key; 150 } else { 151 ctx->adata.key_inline = false; 152 ctx->adata.key_dma = ctx->key_dma; 153 } 154 155 /* aead_decrypt shared descriptor */ 156 desc = ctx->sh_desc_dec; 157 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize); 158 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 159 desc_bytes(desc), DMA_TO_DEVICE); 160 161 return 0; 162 } 163 164 static int aead_set_sh_desc(struct crypto_aead *aead) 165 { 166 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 167 struct caam_aead_alg, aead); 168 unsigned int ivsize = crypto_aead_ivsize(aead); 169 struct caam_ctx *ctx = crypto_aead_ctx(aead); 170 struct device *jrdev = ctx->jrdev; 171 u32 ctx1_iv_off = 0; 172 u32 *desc, *nonce = NULL; 173 u32 inl_mask; 174 unsigned int data_len[2]; 175 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 176 OP_ALG_AAI_CTR_MOD128); 177 const bool is_rfc3686 = alg->caam.rfc3686; 178 179 if (!ctx->authsize) 180 return 0; 181 182 /* NULL encryption / decryption */ 183 if (!ctx->cdata.keylen) 184 return aead_null_set_sh_desc(aead); 185 186 /* 187 * AES-CTR needs to load IV in CONTEXT1 reg 188 * at an offset of 128bits (16bytes) 189 * CONTEXT1[255:128] = IV 190 */ 191 if (ctr_mode) 192 ctx1_iv_off = 16; 193 194 /* 195 * RFC3686 specific: 196 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 197 */ 198 if (is_rfc3686) { 199 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 200 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad + 201 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE); 202 } 203 204 data_len[0] = ctx->adata.keylen_pad; 205 data_len[1] = ctx->cdata.keylen; 206 207 if (alg->caam.geniv) 208 goto skip_enc; 209 210 /* 211 * Job Descriptor and Shared Descriptors 212 * must all fit into the 64-word Descriptor h/w Buffer 213 */ 214 if (desc_inline_query(DESC_AEAD_ENC_LEN + 215 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 216 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 217 ARRAY_SIZE(data_len)) < 0) 218 return -EINVAL; 219 220 if (inl_mask & 1) 221 ctx->adata.key_virt = ctx->key; 222 else 223 ctx->adata.key_dma = ctx->key_dma; 224 225 if (inl_mask & 2) 226 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 227 else 228 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 229 230 ctx->adata.key_inline = !!(inl_mask & 1); 231 ctx->cdata.key_inline = !!(inl_mask & 2); 232 233 /* aead_encrypt shared descriptor */ 234 desc = ctx->sh_desc_enc; 235 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize, 236 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off, 237 false); 238 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 239 desc_bytes(desc), DMA_TO_DEVICE); 240 241 skip_enc: 242 /* 243 * Job Descriptor and Shared Descriptors 244 * must all fit into the 64-word Descriptor h/w Buffer 245 */ 246 if (desc_inline_query(DESC_AEAD_DEC_LEN + 247 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 248 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 249 ARRAY_SIZE(data_len)) < 0) 250 return -EINVAL; 251 252 if (inl_mask & 1) 253 ctx->adata.key_virt = ctx->key; 254 else 255 ctx->adata.key_dma = ctx->key_dma; 256 257 if (inl_mask & 2) 258 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 259 else 260 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 261 262 ctx->adata.key_inline = !!(inl_mask & 1); 263 ctx->cdata.key_inline = !!(inl_mask & 2); 264 265 /* aead_decrypt shared descriptor */ 266 desc = ctx->sh_desc_dec; 267 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize, 268 ctx->authsize, alg->caam.geniv, is_rfc3686, 269 nonce, ctx1_iv_off, false); 270 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 271 desc_bytes(desc), DMA_TO_DEVICE); 272 273 if (!alg->caam.geniv) 274 goto skip_givenc; 275 276 /* 277 * Job Descriptor and Shared Descriptors 278 * must all fit into the 64-word Descriptor h/w Buffer 279 */ 280 if (desc_inline_query(DESC_AEAD_GIVENC_LEN + 281 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 282 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 283 ARRAY_SIZE(data_len)) < 0) 284 return -EINVAL; 285 286 if (inl_mask & 1) 287 ctx->adata.key_virt = ctx->key; 288 else 289 ctx->adata.key_dma = ctx->key_dma; 290 291 if (inl_mask & 2) 292 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 293 else 294 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 295 296 ctx->adata.key_inline = !!(inl_mask & 1); 297 ctx->cdata.key_inline = !!(inl_mask & 2); 298 299 /* aead_givencrypt shared descriptor */ 300 desc = ctx->sh_desc_enc; 301 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize, 302 ctx->authsize, is_rfc3686, nonce, 303 ctx1_iv_off, false); 304 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 305 desc_bytes(desc), DMA_TO_DEVICE); 306 307 skip_givenc: 308 return 0; 309 } 310 311 static int aead_setauthsize(struct crypto_aead *authenc, 312 unsigned int authsize) 313 { 314 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 315 316 ctx->authsize = authsize; 317 aead_set_sh_desc(authenc); 318 319 return 0; 320 } 321 322 static int gcm_set_sh_desc(struct crypto_aead *aead) 323 { 324 struct caam_ctx *ctx = crypto_aead_ctx(aead); 325 struct device *jrdev = ctx->jrdev; 326 u32 *desc; 327 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 328 ctx->cdata.keylen; 329 330 if (!ctx->cdata.keylen || !ctx->authsize) 331 return 0; 332 333 /* 334 * AES GCM encrypt shared descriptor 335 * Job Descriptor and Shared Descriptor 336 * must fit into the 64-word Descriptor h/w Buffer 337 */ 338 if (rem_bytes >= DESC_GCM_ENC_LEN) { 339 ctx->cdata.key_inline = true; 340 ctx->cdata.key_virt = ctx->key; 341 } else { 342 ctx->cdata.key_inline = false; 343 ctx->cdata.key_dma = ctx->key_dma; 344 } 345 346 desc = ctx->sh_desc_enc; 347 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize); 348 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 349 desc_bytes(desc), DMA_TO_DEVICE); 350 351 /* 352 * Job Descriptor and Shared Descriptors 353 * must all fit into the 64-word Descriptor h/w Buffer 354 */ 355 if (rem_bytes >= DESC_GCM_DEC_LEN) { 356 ctx->cdata.key_inline = true; 357 ctx->cdata.key_virt = ctx->key; 358 } else { 359 ctx->cdata.key_inline = false; 360 ctx->cdata.key_dma = ctx->key_dma; 361 } 362 363 desc = ctx->sh_desc_dec; 364 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize); 365 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 366 desc_bytes(desc), DMA_TO_DEVICE); 367 368 return 0; 369 } 370 371 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize) 372 { 373 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 374 375 ctx->authsize = authsize; 376 gcm_set_sh_desc(authenc); 377 378 return 0; 379 } 380 381 static int rfc4106_set_sh_desc(struct crypto_aead *aead) 382 { 383 struct caam_ctx *ctx = crypto_aead_ctx(aead); 384 struct device *jrdev = ctx->jrdev; 385 u32 *desc; 386 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 387 ctx->cdata.keylen; 388 389 if (!ctx->cdata.keylen || !ctx->authsize) 390 return 0; 391 392 /* 393 * RFC4106 encrypt shared descriptor 394 * Job Descriptor and Shared Descriptor 395 * must fit into the 64-word Descriptor h/w Buffer 396 */ 397 if (rem_bytes >= DESC_RFC4106_ENC_LEN) { 398 ctx->cdata.key_inline = true; 399 ctx->cdata.key_virt = ctx->key; 400 } else { 401 ctx->cdata.key_inline = false; 402 ctx->cdata.key_dma = ctx->key_dma; 403 } 404 405 desc = ctx->sh_desc_enc; 406 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize); 407 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 408 desc_bytes(desc), DMA_TO_DEVICE); 409 410 /* 411 * Job Descriptor and Shared Descriptors 412 * must all fit into the 64-word Descriptor h/w Buffer 413 */ 414 if (rem_bytes >= DESC_RFC4106_DEC_LEN) { 415 ctx->cdata.key_inline = true; 416 ctx->cdata.key_virt = ctx->key; 417 } else { 418 ctx->cdata.key_inline = false; 419 ctx->cdata.key_dma = ctx->key_dma; 420 } 421 422 desc = ctx->sh_desc_dec; 423 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize); 424 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 425 desc_bytes(desc), DMA_TO_DEVICE); 426 427 return 0; 428 } 429 430 static int rfc4106_setauthsize(struct crypto_aead *authenc, 431 unsigned int authsize) 432 { 433 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 434 435 ctx->authsize = authsize; 436 rfc4106_set_sh_desc(authenc); 437 438 return 0; 439 } 440 441 static int rfc4543_set_sh_desc(struct crypto_aead *aead) 442 { 443 struct caam_ctx *ctx = crypto_aead_ctx(aead); 444 struct device *jrdev = ctx->jrdev; 445 u32 *desc; 446 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 447 ctx->cdata.keylen; 448 449 if (!ctx->cdata.keylen || !ctx->authsize) 450 return 0; 451 452 /* 453 * RFC4543 encrypt shared descriptor 454 * Job Descriptor and Shared Descriptor 455 * must fit into the 64-word Descriptor h/w Buffer 456 */ 457 if (rem_bytes >= DESC_RFC4543_ENC_LEN) { 458 ctx->cdata.key_inline = true; 459 ctx->cdata.key_virt = ctx->key; 460 } else { 461 ctx->cdata.key_inline = false; 462 ctx->cdata.key_dma = ctx->key_dma; 463 } 464 465 desc = ctx->sh_desc_enc; 466 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize); 467 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 468 desc_bytes(desc), DMA_TO_DEVICE); 469 470 /* 471 * Job Descriptor and Shared Descriptors 472 * must all fit into the 64-word Descriptor h/w Buffer 473 */ 474 if (rem_bytes >= DESC_RFC4543_DEC_LEN) { 475 ctx->cdata.key_inline = true; 476 ctx->cdata.key_virt = ctx->key; 477 } else { 478 ctx->cdata.key_inline = false; 479 ctx->cdata.key_dma = ctx->key_dma; 480 } 481 482 desc = ctx->sh_desc_dec; 483 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize); 484 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 485 desc_bytes(desc), DMA_TO_DEVICE); 486 487 return 0; 488 } 489 490 static int rfc4543_setauthsize(struct crypto_aead *authenc, 491 unsigned int authsize) 492 { 493 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 494 495 ctx->authsize = authsize; 496 rfc4543_set_sh_desc(authenc); 497 498 return 0; 499 } 500 501 static int aead_setkey(struct crypto_aead *aead, 502 const u8 *key, unsigned int keylen) 503 { 504 struct caam_ctx *ctx = crypto_aead_ctx(aead); 505 struct device *jrdev = ctx->jrdev; 506 struct crypto_authenc_keys keys; 507 int ret = 0; 508 509 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) 510 goto badkey; 511 512 #ifdef DEBUG 513 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n", 514 keys.authkeylen + keys.enckeylen, keys.enckeylen, 515 keys.authkeylen); 516 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 517 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 518 #endif 519 520 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, 521 keys.authkeylen, CAAM_MAX_KEY_SIZE - 522 keys.enckeylen); 523 if (ret) { 524 goto badkey; 525 } 526 527 /* postpend encryption key to auth split key */ 528 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen); 529 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + 530 keys.enckeylen, DMA_TO_DEVICE); 531 #ifdef DEBUG 532 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ", 533 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, 534 ctx->adata.keylen_pad + keys.enckeylen, 1); 535 #endif 536 ctx->cdata.keylen = keys.enckeylen; 537 return aead_set_sh_desc(aead); 538 badkey: 539 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 540 return -EINVAL; 541 } 542 543 static int gcm_setkey(struct crypto_aead *aead, 544 const u8 *key, unsigned int keylen) 545 { 546 struct caam_ctx *ctx = crypto_aead_ctx(aead); 547 struct device *jrdev = ctx->jrdev; 548 549 #ifdef DEBUG 550 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 551 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 552 #endif 553 554 memcpy(ctx->key, key, keylen); 555 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE); 556 ctx->cdata.keylen = keylen; 557 558 return gcm_set_sh_desc(aead); 559 } 560 561 static int rfc4106_setkey(struct crypto_aead *aead, 562 const u8 *key, unsigned int keylen) 563 { 564 struct caam_ctx *ctx = crypto_aead_ctx(aead); 565 struct device *jrdev = ctx->jrdev; 566 567 if (keylen < 4) 568 return -EINVAL; 569 570 #ifdef DEBUG 571 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 572 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 573 #endif 574 575 memcpy(ctx->key, key, keylen); 576 577 /* 578 * The last four bytes of the key material are used as the salt value 579 * in the nonce. Update the AES key length. 580 */ 581 ctx->cdata.keylen = keylen - 4; 582 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 583 DMA_TO_DEVICE); 584 return rfc4106_set_sh_desc(aead); 585 } 586 587 static int rfc4543_setkey(struct crypto_aead *aead, 588 const u8 *key, unsigned int keylen) 589 { 590 struct caam_ctx *ctx = crypto_aead_ctx(aead); 591 struct device *jrdev = ctx->jrdev; 592 593 if (keylen < 4) 594 return -EINVAL; 595 596 #ifdef DEBUG 597 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 598 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 599 #endif 600 601 memcpy(ctx->key, key, keylen); 602 603 /* 604 * The last four bytes of the key material are used as the salt value 605 * in the nonce. Update the AES key length. 606 */ 607 ctx->cdata.keylen = keylen - 4; 608 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 609 DMA_TO_DEVICE); 610 return rfc4543_set_sh_desc(aead); 611 } 612 613 static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher, 614 const u8 *key, unsigned int keylen) 615 { 616 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 617 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher); 618 const char *alg_name = crypto_tfm_alg_name(tfm); 619 struct device *jrdev = ctx->jrdev; 620 unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 621 u32 *desc; 622 u32 ctx1_iv_off = 0; 623 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 624 OP_ALG_AAI_CTR_MOD128); 625 const bool is_rfc3686 = (ctr_mode && 626 (strstr(alg_name, "rfc3686") != NULL)); 627 628 memcpy(ctx->key, key, keylen); 629 #ifdef DEBUG 630 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 631 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 632 #endif 633 /* 634 * AES-CTR needs to load IV in CONTEXT1 reg 635 * at an offset of 128bits (16bytes) 636 * CONTEXT1[255:128] = IV 637 */ 638 if (ctr_mode) 639 ctx1_iv_off = 16; 640 641 /* 642 * RFC3686 specific: 643 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER} 644 * | *key = {KEY, NONCE} 645 */ 646 if (is_rfc3686) { 647 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 648 keylen -= CTR_RFC3686_NONCE_SIZE; 649 } 650 651 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE); 652 ctx->cdata.keylen = keylen; 653 ctx->cdata.key_virt = ctx->key; 654 ctx->cdata.key_inline = true; 655 656 /* ablkcipher_encrypt shared descriptor */ 657 desc = ctx->sh_desc_enc; 658 cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686, 659 ctx1_iv_off); 660 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 661 desc_bytes(desc), DMA_TO_DEVICE); 662 663 /* ablkcipher_decrypt shared descriptor */ 664 desc = ctx->sh_desc_dec; 665 cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686, 666 ctx1_iv_off); 667 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 668 desc_bytes(desc), DMA_TO_DEVICE); 669 670 /* ablkcipher_givencrypt shared descriptor */ 671 desc = ctx->sh_desc_givenc; 672 cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686, 673 ctx1_iv_off); 674 dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma, 675 desc_bytes(desc), DMA_TO_DEVICE); 676 677 return 0; 678 } 679 680 static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher, 681 const u8 *key, unsigned int keylen) 682 { 683 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 684 struct device *jrdev = ctx->jrdev; 685 u32 *desc; 686 687 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) { 688 crypto_ablkcipher_set_flags(ablkcipher, 689 CRYPTO_TFM_RES_BAD_KEY_LEN); 690 dev_err(jrdev, "key size mismatch\n"); 691 return -EINVAL; 692 } 693 694 memcpy(ctx->key, key, keylen); 695 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE); 696 ctx->cdata.keylen = keylen; 697 ctx->cdata.key_virt = ctx->key; 698 ctx->cdata.key_inline = true; 699 700 /* xts_ablkcipher_encrypt shared descriptor */ 701 desc = ctx->sh_desc_enc; 702 cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata); 703 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 704 desc_bytes(desc), DMA_TO_DEVICE); 705 706 /* xts_ablkcipher_decrypt shared descriptor */ 707 desc = ctx->sh_desc_dec; 708 cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata); 709 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 710 desc_bytes(desc), DMA_TO_DEVICE); 711 712 return 0; 713 } 714 715 /* 716 * aead_edesc - s/w-extended aead descriptor 717 * @src_nents: number of segments in input s/w scatterlist 718 * @dst_nents: number of segments in output s/w scatterlist 719 * @sec4_sg_bytes: length of dma mapped sec4_sg space 720 * @sec4_sg_dma: bus physical mapped address of h/w link table 721 * @sec4_sg: pointer to h/w link table 722 * @hw_desc: the h/w job descriptor followed by any referenced link tables 723 */ 724 struct aead_edesc { 725 int src_nents; 726 int dst_nents; 727 int sec4_sg_bytes; 728 dma_addr_t sec4_sg_dma; 729 struct sec4_sg_entry *sec4_sg; 730 u32 hw_desc[]; 731 }; 732 733 /* 734 * ablkcipher_edesc - s/w-extended ablkcipher descriptor 735 * @src_nents: number of segments in input s/w scatterlist 736 * @dst_nents: number of segments in output s/w scatterlist 737 * @iv_dma: dma address of iv for checking continuity and link table 738 * @sec4_sg_bytes: length of dma mapped sec4_sg space 739 * @sec4_sg_dma: bus physical mapped address of h/w link table 740 * @sec4_sg: pointer to h/w link table 741 * @hw_desc: the h/w job descriptor followed by any referenced link tables 742 */ 743 struct ablkcipher_edesc { 744 int src_nents; 745 int dst_nents; 746 dma_addr_t iv_dma; 747 int sec4_sg_bytes; 748 dma_addr_t sec4_sg_dma; 749 struct sec4_sg_entry *sec4_sg; 750 u32 hw_desc[0]; 751 }; 752 753 static void caam_unmap(struct device *dev, struct scatterlist *src, 754 struct scatterlist *dst, int src_nents, 755 int dst_nents, 756 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma, 757 int sec4_sg_bytes) 758 { 759 if (dst != src) { 760 if (src_nents) 761 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); 762 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); 763 } else { 764 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); 765 } 766 767 if (iv_dma) 768 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE); 769 if (sec4_sg_bytes) 770 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes, 771 DMA_TO_DEVICE); 772 } 773 774 static void aead_unmap(struct device *dev, 775 struct aead_edesc *edesc, 776 struct aead_request *req) 777 { 778 caam_unmap(dev, req->src, req->dst, 779 edesc->src_nents, edesc->dst_nents, 0, 0, 780 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 781 } 782 783 static void ablkcipher_unmap(struct device *dev, 784 struct ablkcipher_edesc *edesc, 785 struct ablkcipher_request *req) 786 { 787 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 788 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 789 790 caam_unmap(dev, req->src, req->dst, 791 edesc->src_nents, edesc->dst_nents, 792 edesc->iv_dma, ivsize, 793 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 794 } 795 796 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 797 void *context) 798 { 799 struct aead_request *req = context; 800 struct aead_edesc *edesc; 801 802 #ifdef DEBUG 803 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 804 #endif 805 806 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 807 808 if (err) 809 caam_jr_strstatus(jrdev, err); 810 811 aead_unmap(jrdev, edesc, req); 812 813 kfree(edesc); 814 815 aead_request_complete(req, err); 816 } 817 818 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 819 void *context) 820 { 821 struct aead_request *req = context; 822 struct aead_edesc *edesc; 823 824 #ifdef DEBUG 825 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 826 #endif 827 828 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 829 830 if (err) 831 caam_jr_strstatus(jrdev, err); 832 833 aead_unmap(jrdev, edesc, req); 834 835 /* 836 * verify hw auth check passed else return -EBADMSG 837 */ 838 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK) 839 err = -EBADMSG; 840 841 kfree(edesc); 842 843 aead_request_complete(req, err); 844 } 845 846 static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 847 void *context) 848 { 849 struct ablkcipher_request *req = context; 850 struct ablkcipher_edesc *edesc; 851 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 852 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 853 854 #ifdef DEBUG 855 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 856 #endif 857 858 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]); 859 860 if (err) 861 caam_jr_strstatus(jrdev, err); 862 863 #ifdef DEBUG 864 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ", 865 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 866 edesc->src_nents > 1 ? 100 : ivsize, 1); 867 #endif 868 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ", 869 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 870 edesc->dst_nents > 1 ? 100 : req->nbytes, 1); 871 872 ablkcipher_unmap(jrdev, edesc, req); 873 874 /* 875 * The crypto API expects us to set the IV (req->info) to the last 876 * ciphertext block. This is used e.g. by the CTS mode. 877 */ 878 scatterwalk_map_and_copy(req->info, req->dst, req->nbytes - ivsize, 879 ivsize, 0); 880 881 kfree(edesc); 882 883 ablkcipher_request_complete(req, err); 884 } 885 886 static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 887 void *context) 888 { 889 struct ablkcipher_request *req = context; 890 struct ablkcipher_edesc *edesc; 891 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 892 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 893 894 #ifdef DEBUG 895 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 896 #endif 897 898 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]); 899 if (err) 900 caam_jr_strstatus(jrdev, err); 901 902 #ifdef DEBUG 903 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ", 904 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 905 ivsize, 1); 906 #endif 907 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ", 908 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 909 edesc->dst_nents > 1 ? 100 : req->nbytes, 1); 910 911 ablkcipher_unmap(jrdev, edesc, req); 912 913 /* 914 * The crypto API expects us to set the IV (req->info) to the last 915 * ciphertext block. 916 */ 917 scatterwalk_map_and_copy(req->info, req->src, req->nbytes - ivsize, 918 ivsize, 0); 919 920 kfree(edesc); 921 922 ablkcipher_request_complete(req, err); 923 } 924 925 /* 926 * Fill in aead job descriptor 927 */ 928 static void init_aead_job(struct aead_request *req, 929 struct aead_edesc *edesc, 930 bool all_contig, bool encrypt) 931 { 932 struct crypto_aead *aead = crypto_aead_reqtfm(req); 933 struct caam_ctx *ctx = crypto_aead_ctx(aead); 934 int authsize = ctx->authsize; 935 u32 *desc = edesc->hw_desc; 936 u32 out_options, in_options; 937 dma_addr_t dst_dma, src_dma; 938 int len, sec4_sg_index = 0; 939 dma_addr_t ptr; 940 u32 *sh_desc; 941 942 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 943 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 944 945 len = desc_len(sh_desc); 946 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 947 948 if (all_contig) { 949 src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0; 950 in_options = 0; 951 } else { 952 src_dma = edesc->sec4_sg_dma; 953 sec4_sg_index += edesc->src_nents; 954 in_options = LDST_SGF; 955 } 956 957 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, 958 in_options); 959 960 dst_dma = src_dma; 961 out_options = in_options; 962 963 if (unlikely(req->src != req->dst)) { 964 if (edesc->dst_nents == 1) { 965 dst_dma = sg_dma_address(req->dst); 966 } else { 967 dst_dma = edesc->sec4_sg_dma + 968 sec4_sg_index * 969 sizeof(struct sec4_sg_entry); 970 out_options = LDST_SGF; 971 } 972 } 973 974 if (encrypt) 975 append_seq_out_ptr(desc, dst_dma, 976 req->assoclen + req->cryptlen + authsize, 977 out_options); 978 else 979 append_seq_out_ptr(desc, dst_dma, 980 req->assoclen + req->cryptlen - authsize, 981 out_options); 982 983 /* REG3 = assoclen */ 984 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 985 } 986 987 static void init_gcm_job(struct aead_request *req, 988 struct aead_edesc *edesc, 989 bool all_contig, bool encrypt) 990 { 991 struct crypto_aead *aead = crypto_aead_reqtfm(req); 992 struct caam_ctx *ctx = crypto_aead_ctx(aead); 993 unsigned int ivsize = crypto_aead_ivsize(aead); 994 u32 *desc = edesc->hw_desc; 995 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE); 996 unsigned int last; 997 998 init_aead_job(req, edesc, all_contig, encrypt); 999 1000 /* BUG This should not be specific to generic GCM. */ 1001 last = 0; 1002 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen)) 1003 last = FIFOLD_TYPE_LAST1; 1004 1005 /* Read GCM IV */ 1006 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE | 1007 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last); 1008 /* Append Salt */ 1009 if (!generic_gcm) 1010 append_data(desc, ctx->key + ctx->cdata.keylen, 4); 1011 /* Append IV */ 1012 append_data(desc, req->iv, ivsize); 1013 /* End of blank commands */ 1014 } 1015 1016 static void init_authenc_job(struct aead_request *req, 1017 struct aead_edesc *edesc, 1018 bool all_contig, bool encrypt) 1019 { 1020 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1021 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 1022 struct caam_aead_alg, aead); 1023 unsigned int ivsize = crypto_aead_ivsize(aead); 1024 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1025 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 1026 OP_ALG_AAI_CTR_MOD128); 1027 const bool is_rfc3686 = alg->caam.rfc3686; 1028 u32 *desc = edesc->hw_desc; 1029 u32 ivoffset = 0; 1030 1031 /* 1032 * AES-CTR needs to load IV in CONTEXT1 reg 1033 * at an offset of 128bits (16bytes) 1034 * CONTEXT1[255:128] = IV 1035 */ 1036 if (ctr_mode) 1037 ivoffset = 16; 1038 1039 /* 1040 * RFC3686 specific: 1041 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 1042 */ 1043 if (is_rfc3686) 1044 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE; 1045 1046 init_aead_job(req, edesc, all_contig, encrypt); 1047 1048 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv)) 1049 append_load_as_imm(desc, req->iv, ivsize, 1050 LDST_CLASS_1_CCB | 1051 LDST_SRCDST_BYTE_CONTEXT | 1052 (ivoffset << LDST_OFFSET_SHIFT)); 1053 } 1054 1055 /* 1056 * Fill in ablkcipher job descriptor 1057 */ 1058 static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr, 1059 struct ablkcipher_edesc *edesc, 1060 struct ablkcipher_request *req, 1061 bool iv_contig) 1062 { 1063 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1064 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1065 u32 *desc = edesc->hw_desc; 1066 u32 out_options = 0, in_options; 1067 dma_addr_t dst_dma, src_dma; 1068 int len, sec4_sg_index = 0; 1069 1070 #ifdef DEBUG 1071 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ", 1072 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 1073 ivsize, 1); 1074 pr_err("asked=%d, nbytes%d\n", 1075 (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes); 1076 #endif 1077 caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ", 1078 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1079 edesc->src_nents > 1 ? 100 : req->nbytes, 1); 1080 1081 len = desc_len(sh_desc); 1082 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1083 1084 if (iv_contig) { 1085 src_dma = edesc->iv_dma; 1086 in_options = 0; 1087 } else { 1088 src_dma = edesc->sec4_sg_dma; 1089 sec4_sg_index += edesc->src_nents + 1; 1090 in_options = LDST_SGF; 1091 } 1092 append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options); 1093 1094 if (likely(req->src == req->dst)) { 1095 if (edesc->src_nents == 1 && iv_contig) { 1096 dst_dma = sg_dma_address(req->src); 1097 } else { 1098 dst_dma = edesc->sec4_sg_dma + 1099 sizeof(struct sec4_sg_entry); 1100 out_options = LDST_SGF; 1101 } 1102 } else { 1103 if (edesc->dst_nents == 1) { 1104 dst_dma = sg_dma_address(req->dst); 1105 } else { 1106 dst_dma = edesc->sec4_sg_dma + 1107 sec4_sg_index * sizeof(struct sec4_sg_entry); 1108 out_options = LDST_SGF; 1109 } 1110 } 1111 append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options); 1112 } 1113 1114 /* 1115 * Fill in ablkcipher givencrypt job descriptor 1116 */ 1117 static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr, 1118 struct ablkcipher_edesc *edesc, 1119 struct ablkcipher_request *req, 1120 bool iv_contig) 1121 { 1122 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1123 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1124 u32 *desc = edesc->hw_desc; 1125 u32 out_options, in_options; 1126 dma_addr_t dst_dma, src_dma; 1127 int len, sec4_sg_index = 0; 1128 1129 #ifdef DEBUG 1130 print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ", 1131 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 1132 ivsize, 1); 1133 #endif 1134 caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ", 1135 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1136 edesc->src_nents > 1 ? 100 : req->nbytes, 1); 1137 1138 len = desc_len(sh_desc); 1139 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1140 1141 if (edesc->src_nents == 1) { 1142 src_dma = sg_dma_address(req->src); 1143 in_options = 0; 1144 } else { 1145 src_dma = edesc->sec4_sg_dma; 1146 sec4_sg_index += edesc->src_nents; 1147 in_options = LDST_SGF; 1148 } 1149 append_seq_in_ptr(desc, src_dma, req->nbytes, in_options); 1150 1151 if (iv_contig) { 1152 dst_dma = edesc->iv_dma; 1153 out_options = 0; 1154 } else { 1155 dst_dma = edesc->sec4_sg_dma + 1156 sec4_sg_index * sizeof(struct sec4_sg_entry); 1157 out_options = LDST_SGF; 1158 } 1159 append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options); 1160 } 1161 1162 /* 1163 * allocate and map the aead extended descriptor 1164 */ 1165 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req, 1166 int desc_bytes, bool *all_contig_ptr, 1167 bool encrypt) 1168 { 1169 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1170 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1171 struct device *jrdev = ctx->jrdev; 1172 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1173 GFP_KERNEL : GFP_ATOMIC; 1174 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1175 struct aead_edesc *edesc; 1176 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes; 1177 unsigned int authsize = ctx->authsize; 1178 1179 if (unlikely(req->dst != req->src)) { 1180 src_nents = sg_nents_for_len(req->src, req->assoclen + 1181 req->cryptlen); 1182 if (unlikely(src_nents < 0)) { 1183 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1184 req->assoclen + req->cryptlen); 1185 return ERR_PTR(src_nents); 1186 } 1187 1188 dst_nents = sg_nents_for_len(req->dst, req->assoclen + 1189 req->cryptlen + 1190 (encrypt ? authsize : 1191 (-authsize))); 1192 if (unlikely(dst_nents < 0)) { 1193 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1194 req->assoclen + req->cryptlen + 1195 (encrypt ? authsize : (-authsize))); 1196 return ERR_PTR(dst_nents); 1197 } 1198 } else { 1199 src_nents = sg_nents_for_len(req->src, req->assoclen + 1200 req->cryptlen + 1201 (encrypt ? authsize : 0)); 1202 if (unlikely(src_nents < 0)) { 1203 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1204 req->assoclen + req->cryptlen + 1205 (encrypt ? authsize : 0)); 1206 return ERR_PTR(src_nents); 1207 } 1208 } 1209 1210 if (likely(req->src == req->dst)) { 1211 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1212 DMA_BIDIRECTIONAL); 1213 if (unlikely(!mapped_src_nents)) { 1214 dev_err(jrdev, "unable to map source\n"); 1215 return ERR_PTR(-ENOMEM); 1216 } 1217 } else { 1218 /* Cover also the case of null (zero length) input data */ 1219 if (src_nents) { 1220 mapped_src_nents = dma_map_sg(jrdev, req->src, 1221 src_nents, DMA_TO_DEVICE); 1222 if (unlikely(!mapped_src_nents)) { 1223 dev_err(jrdev, "unable to map source\n"); 1224 return ERR_PTR(-ENOMEM); 1225 } 1226 } else { 1227 mapped_src_nents = 0; 1228 } 1229 1230 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1231 DMA_FROM_DEVICE); 1232 if (unlikely(!mapped_dst_nents)) { 1233 dev_err(jrdev, "unable to map destination\n"); 1234 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1235 return ERR_PTR(-ENOMEM); 1236 } 1237 } 1238 1239 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0; 1240 sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0; 1241 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry); 1242 1243 /* allocate space for base edesc and hw desc commands, link tables */ 1244 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1245 GFP_DMA | flags); 1246 if (!edesc) { 1247 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1248 0, 0, 0); 1249 return ERR_PTR(-ENOMEM); 1250 } 1251 1252 edesc->src_nents = src_nents; 1253 edesc->dst_nents = dst_nents; 1254 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + 1255 desc_bytes; 1256 *all_contig_ptr = !(mapped_src_nents > 1); 1257 1258 sec4_sg_index = 0; 1259 if (mapped_src_nents > 1) { 1260 sg_to_sec4_sg_last(req->src, mapped_src_nents, 1261 edesc->sec4_sg + sec4_sg_index, 0); 1262 sec4_sg_index += mapped_src_nents; 1263 } 1264 if (mapped_dst_nents > 1) { 1265 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1266 edesc->sec4_sg + sec4_sg_index, 0); 1267 } 1268 1269 if (!sec4_sg_bytes) 1270 return edesc; 1271 1272 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1273 sec4_sg_bytes, DMA_TO_DEVICE); 1274 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1275 dev_err(jrdev, "unable to map S/G table\n"); 1276 aead_unmap(jrdev, edesc, req); 1277 kfree(edesc); 1278 return ERR_PTR(-ENOMEM); 1279 } 1280 1281 edesc->sec4_sg_bytes = sec4_sg_bytes; 1282 1283 return edesc; 1284 } 1285 1286 static int gcm_encrypt(struct aead_request *req) 1287 { 1288 struct aead_edesc *edesc; 1289 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1290 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1291 struct device *jrdev = ctx->jrdev; 1292 bool all_contig; 1293 u32 *desc; 1294 int ret = 0; 1295 1296 /* allocate extended descriptor */ 1297 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true); 1298 if (IS_ERR(edesc)) 1299 return PTR_ERR(edesc); 1300 1301 /* Create and submit job descriptor */ 1302 init_gcm_job(req, edesc, all_contig, true); 1303 #ifdef DEBUG 1304 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1305 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1306 desc_bytes(edesc->hw_desc), 1); 1307 #endif 1308 1309 desc = edesc->hw_desc; 1310 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1311 if (!ret) { 1312 ret = -EINPROGRESS; 1313 } else { 1314 aead_unmap(jrdev, edesc, req); 1315 kfree(edesc); 1316 } 1317 1318 return ret; 1319 } 1320 1321 static int ipsec_gcm_encrypt(struct aead_request *req) 1322 { 1323 if (req->assoclen < 8) 1324 return -EINVAL; 1325 1326 return gcm_encrypt(req); 1327 } 1328 1329 static int aead_encrypt(struct aead_request *req) 1330 { 1331 struct aead_edesc *edesc; 1332 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1333 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1334 struct device *jrdev = ctx->jrdev; 1335 bool all_contig; 1336 u32 *desc; 1337 int ret = 0; 1338 1339 /* allocate extended descriptor */ 1340 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1341 &all_contig, true); 1342 if (IS_ERR(edesc)) 1343 return PTR_ERR(edesc); 1344 1345 /* Create and submit job descriptor */ 1346 init_authenc_job(req, edesc, all_contig, true); 1347 #ifdef DEBUG 1348 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1349 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1350 desc_bytes(edesc->hw_desc), 1); 1351 #endif 1352 1353 desc = edesc->hw_desc; 1354 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1355 if (!ret) { 1356 ret = -EINPROGRESS; 1357 } else { 1358 aead_unmap(jrdev, edesc, req); 1359 kfree(edesc); 1360 } 1361 1362 return ret; 1363 } 1364 1365 static int gcm_decrypt(struct aead_request *req) 1366 { 1367 struct aead_edesc *edesc; 1368 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1369 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1370 struct device *jrdev = ctx->jrdev; 1371 bool all_contig; 1372 u32 *desc; 1373 int ret = 0; 1374 1375 /* allocate extended descriptor */ 1376 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false); 1377 if (IS_ERR(edesc)) 1378 return PTR_ERR(edesc); 1379 1380 /* Create and submit job descriptor*/ 1381 init_gcm_job(req, edesc, all_contig, false); 1382 #ifdef DEBUG 1383 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1384 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1385 desc_bytes(edesc->hw_desc), 1); 1386 #endif 1387 1388 desc = edesc->hw_desc; 1389 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1390 if (!ret) { 1391 ret = -EINPROGRESS; 1392 } else { 1393 aead_unmap(jrdev, edesc, req); 1394 kfree(edesc); 1395 } 1396 1397 return ret; 1398 } 1399 1400 static int ipsec_gcm_decrypt(struct aead_request *req) 1401 { 1402 if (req->assoclen < 8) 1403 return -EINVAL; 1404 1405 return gcm_decrypt(req); 1406 } 1407 1408 static int aead_decrypt(struct aead_request *req) 1409 { 1410 struct aead_edesc *edesc; 1411 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1412 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1413 struct device *jrdev = ctx->jrdev; 1414 bool all_contig; 1415 u32 *desc; 1416 int ret = 0; 1417 1418 caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ", 1419 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1420 req->assoclen + req->cryptlen, 1); 1421 1422 /* allocate extended descriptor */ 1423 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1424 &all_contig, false); 1425 if (IS_ERR(edesc)) 1426 return PTR_ERR(edesc); 1427 1428 /* Create and submit job descriptor*/ 1429 init_authenc_job(req, edesc, all_contig, false); 1430 #ifdef DEBUG 1431 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1432 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1433 desc_bytes(edesc->hw_desc), 1); 1434 #endif 1435 1436 desc = edesc->hw_desc; 1437 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1438 if (!ret) { 1439 ret = -EINPROGRESS; 1440 } else { 1441 aead_unmap(jrdev, edesc, req); 1442 kfree(edesc); 1443 } 1444 1445 return ret; 1446 } 1447 1448 /* 1449 * allocate and map the ablkcipher extended descriptor for ablkcipher 1450 */ 1451 static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request 1452 *req, int desc_bytes, 1453 bool *iv_contig_out) 1454 { 1455 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1456 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1457 struct device *jrdev = ctx->jrdev; 1458 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1459 GFP_KERNEL : GFP_ATOMIC; 1460 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1461 struct ablkcipher_edesc *edesc; 1462 dma_addr_t iv_dma = 0; 1463 bool in_contig; 1464 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1465 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1466 1467 src_nents = sg_nents_for_len(req->src, req->nbytes); 1468 if (unlikely(src_nents < 0)) { 1469 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1470 req->nbytes); 1471 return ERR_PTR(src_nents); 1472 } 1473 1474 if (req->dst != req->src) { 1475 dst_nents = sg_nents_for_len(req->dst, req->nbytes); 1476 if (unlikely(dst_nents < 0)) { 1477 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1478 req->nbytes); 1479 return ERR_PTR(dst_nents); 1480 } 1481 } 1482 1483 if (likely(req->src == req->dst)) { 1484 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1485 DMA_BIDIRECTIONAL); 1486 if (unlikely(!mapped_src_nents)) { 1487 dev_err(jrdev, "unable to map source\n"); 1488 return ERR_PTR(-ENOMEM); 1489 } 1490 } else { 1491 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1492 DMA_TO_DEVICE); 1493 if (unlikely(!mapped_src_nents)) { 1494 dev_err(jrdev, "unable to map source\n"); 1495 return ERR_PTR(-ENOMEM); 1496 } 1497 1498 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1499 DMA_FROM_DEVICE); 1500 if (unlikely(!mapped_dst_nents)) { 1501 dev_err(jrdev, "unable to map destination\n"); 1502 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1503 return ERR_PTR(-ENOMEM); 1504 } 1505 } 1506 1507 iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE); 1508 if (dma_mapping_error(jrdev, iv_dma)) { 1509 dev_err(jrdev, "unable to map IV\n"); 1510 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1511 0, 0, 0); 1512 return ERR_PTR(-ENOMEM); 1513 } 1514 1515 if (mapped_src_nents == 1 && 1516 iv_dma + ivsize == sg_dma_address(req->src)) { 1517 in_contig = true; 1518 sec4_sg_ents = 0; 1519 } else { 1520 in_contig = false; 1521 sec4_sg_ents = 1 + mapped_src_nents; 1522 } 1523 dst_sg_idx = sec4_sg_ents; 1524 sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0; 1525 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry); 1526 1527 /* allocate space for base edesc and hw desc commands, link tables */ 1528 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1529 GFP_DMA | flags); 1530 if (!edesc) { 1531 dev_err(jrdev, "could not allocate extended descriptor\n"); 1532 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1533 iv_dma, ivsize, 0, 0); 1534 return ERR_PTR(-ENOMEM); 1535 } 1536 1537 edesc->src_nents = src_nents; 1538 edesc->dst_nents = dst_nents; 1539 edesc->sec4_sg_bytes = sec4_sg_bytes; 1540 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) + 1541 desc_bytes; 1542 1543 if (!in_contig) { 1544 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); 1545 sg_to_sec4_sg_last(req->src, mapped_src_nents, 1546 edesc->sec4_sg + 1, 0); 1547 } 1548 1549 if (mapped_dst_nents > 1) { 1550 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1551 edesc->sec4_sg + dst_sg_idx, 0); 1552 } 1553 1554 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1555 sec4_sg_bytes, DMA_TO_DEVICE); 1556 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1557 dev_err(jrdev, "unable to map S/G table\n"); 1558 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1559 iv_dma, ivsize, 0, 0); 1560 kfree(edesc); 1561 return ERR_PTR(-ENOMEM); 1562 } 1563 1564 edesc->iv_dma = iv_dma; 1565 1566 #ifdef DEBUG 1567 print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ", 1568 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, 1569 sec4_sg_bytes, 1); 1570 #endif 1571 1572 *iv_contig_out = in_contig; 1573 return edesc; 1574 } 1575 1576 static int ablkcipher_encrypt(struct ablkcipher_request *req) 1577 { 1578 struct ablkcipher_edesc *edesc; 1579 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1580 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1581 struct device *jrdev = ctx->jrdev; 1582 bool iv_contig; 1583 u32 *desc; 1584 int ret = 0; 1585 1586 /* allocate extended descriptor */ 1587 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * 1588 CAAM_CMD_SZ, &iv_contig); 1589 if (IS_ERR(edesc)) 1590 return PTR_ERR(edesc); 1591 1592 /* Create and submit job descriptor*/ 1593 init_ablkcipher_job(ctx->sh_desc_enc, 1594 ctx->sh_desc_enc_dma, edesc, req, iv_contig); 1595 #ifdef DEBUG 1596 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ", 1597 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1598 desc_bytes(edesc->hw_desc), 1); 1599 #endif 1600 desc = edesc->hw_desc; 1601 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req); 1602 1603 if (!ret) { 1604 ret = -EINPROGRESS; 1605 } else { 1606 ablkcipher_unmap(jrdev, edesc, req); 1607 kfree(edesc); 1608 } 1609 1610 return ret; 1611 } 1612 1613 static int ablkcipher_decrypt(struct ablkcipher_request *req) 1614 { 1615 struct ablkcipher_edesc *edesc; 1616 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1617 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1618 struct device *jrdev = ctx->jrdev; 1619 bool iv_contig; 1620 u32 *desc; 1621 int ret = 0; 1622 1623 /* allocate extended descriptor */ 1624 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * 1625 CAAM_CMD_SZ, &iv_contig); 1626 if (IS_ERR(edesc)) 1627 return PTR_ERR(edesc); 1628 1629 /* Create and submit job descriptor*/ 1630 init_ablkcipher_job(ctx->sh_desc_dec, 1631 ctx->sh_desc_dec_dma, edesc, req, iv_contig); 1632 desc = edesc->hw_desc; 1633 #ifdef DEBUG 1634 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ", 1635 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1636 desc_bytes(edesc->hw_desc), 1); 1637 #endif 1638 1639 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req); 1640 if (!ret) { 1641 ret = -EINPROGRESS; 1642 } else { 1643 ablkcipher_unmap(jrdev, edesc, req); 1644 kfree(edesc); 1645 } 1646 1647 return ret; 1648 } 1649 1650 /* 1651 * allocate and map the ablkcipher extended descriptor 1652 * for ablkcipher givencrypt 1653 */ 1654 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc( 1655 struct skcipher_givcrypt_request *greq, 1656 int desc_bytes, 1657 bool *iv_contig_out) 1658 { 1659 struct ablkcipher_request *req = &greq->creq; 1660 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1661 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1662 struct device *jrdev = ctx->jrdev; 1663 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1664 GFP_KERNEL : GFP_ATOMIC; 1665 int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents; 1666 struct ablkcipher_edesc *edesc; 1667 dma_addr_t iv_dma = 0; 1668 bool out_contig; 1669 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1670 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1671 1672 src_nents = sg_nents_for_len(req->src, req->nbytes); 1673 if (unlikely(src_nents < 0)) { 1674 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1675 req->nbytes); 1676 return ERR_PTR(src_nents); 1677 } 1678 1679 if (likely(req->src == req->dst)) { 1680 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1681 DMA_BIDIRECTIONAL); 1682 if (unlikely(!mapped_src_nents)) { 1683 dev_err(jrdev, "unable to map source\n"); 1684 return ERR_PTR(-ENOMEM); 1685 } 1686 1687 dst_nents = src_nents; 1688 mapped_dst_nents = src_nents; 1689 } else { 1690 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1691 DMA_TO_DEVICE); 1692 if (unlikely(!mapped_src_nents)) { 1693 dev_err(jrdev, "unable to map source\n"); 1694 return ERR_PTR(-ENOMEM); 1695 } 1696 1697 dst_nents = sg_nents_for_len(req->dst, req->nbytes); 1698 if (unlikely(dst_nents < 0)) { 1699 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1700 req->nbytes); 1701 return ERR_PTR(dst_nents); 1702 } 1703 1704 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1705 DMA_FROM_DEVICE); 1706 if (unlikely(!mapped_dst_nents)) { 1707 dev_err(jrdev, "unable to map destination\n"); 1708 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1709 return ERR_PTR(-ENOMEM); 1710 } 1711 } 1712 1713 /* 1714 * Check if iv can be contiguous with source and destination. 1715 * If so, include it. If not, create scatterlist. 1716 */ 1717 iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE); 1718 if (dma_mapping_error(jrdev, iv_dma)) { 1719 dev_err(jrdev, "unable to map IV\n"); 1720 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1721 0, 0, 0); 1722 return ERR_PTR(-ENOMEM); 1723 } 1724 1725 sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0; 1726 dst_sg_idx = sec4_sg_ents; 1727 if (mapped_dst_nents == 1 && 1728 iv_dma + ivsize == sg_dma_address(req->dst)) { 1729 out_contig = true; 1730 } else { 1731 out_contig = false; 1732 sec4_sg_ents += 1 + mapped_dst_nents; 1733 } 1734 1735 /* allocate space for base edesc and hw desc commands, link tables */ 1736 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry); 1737 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1738 GFP_DMA | flags); 1739 if (!edesc) { 1740 dev_err(jrdev, "could not allocate extended descriptor\n"); 1741 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1742 iv_dma, ivsize, 0, 0); 1743 return ERR_PTR(-ENOMEM); 1744 } 1745 1746 edesc->src_nents = src_nents; 1747 edesc->dst_nents = dst_nents; 1748 edesc->sec4_sg_bytes = sec4_sg_bytes; 1749 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) + 1750 desc_bytes; 1751 1752 if (mapped_src_nents > 1) 1753 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg, 1754 0); 1755 1756 if (!out_contig) { 1757 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx, 1758 iv_dma, ivsize, 0); 1759 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1760 edesc->sec4_sg + dst_sg_idx + 1, 0); 1761 } 1762 1763 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1764 sec4_sg_bytes, DMA_TO_DEVICE); 1765 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1766 dev_err(jrdev, "unable to map S/G table\n"); 1767 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1768 iv_dma, ivsize, 0, 0); 1769 kfree(edesc); 1770 return ERR_PTR(-ENOMEM); 1771 } 1772 edesc->iv_dma = iv_dma; 1773 1774 #ifdef DEBUG 1775 print_hex_dump(KERN_ERR, 1776 "ablkcipher sec4_sg@" __stringify(__LINE__) ": ", 1777 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, 1778 sec4_sg_bytes, 1); 1779 #endif 1780 1781 *iv_contig_out = out_contig; 1782 return edesc; 1783 } 1784 1785 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq) 1786 { 1787 struct ablkcipher_request *req = &creq->creq; 1788 struct ablkcipher_edesc *edesc; 1789 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1790 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1791 struct device *jrdev = ctx->jrdev; 1792 bool iv_contig = false; 1793 u32 *desc; 1794 int ret = 0; 1795 1796 /* allocate extended descriptor */ 1797 edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN * 1798 CAAM_CMD_SZ, &iv_contig); 1799 if (IS_ERR(edesc)) 1800 return PTR_ERR(edesc); 1801 1802 /* Create and submit job descriptor*/ 1803 init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma, 1804 edesc, req, iv_contig); 1805 #ifdef DEBUG 1806 print_hex_dump(KERN_ERR, 1807 "ablkcipher jobdesc@" __stringify(__LINE__) ": ", 1808 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1809 desc_bytes(edesc->hw_desc), 1); 1810 #endif 1811 desc = edesc->hw_desc; 1812 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req); 1813 1814 if (!ret) { 1815 ret = -EINPROGRESS; 1816 } else { 1817 ablkcipher_unmap(jrdev, edesc, req); 1818 kfree(edesc); 1819 } 1820 1821 return ret; 1822 } 1823 1824 #define template_aead template_u.aead 1825 #define template_ablkcipher template_u.ablkcipher 1826 struct caam_alg_template { 1827 char name[CRYPTO_MAX_ALG_NAME]; 1828 char driver_name[CRYPTO_MAX_ALG_NAME]; 1829 unsigned int blocksize; 1830 u32 type; 1831 union { 1832 struct ablkcipher_alg ablkcipher; 1833 } template_u; 1834 u32 class1_alg_type; 1835 u32 class2_alg_type; 1836 }; 1837 1838 static struct caam_alg_template driver_algs[] = { 1839 /* ablkcipher descriptor */ 1840 { 1841 .name = "cbc(aes)", 1842 .driver_name = "cbc-aes-caam", 1843 .blocksize = AES_BLOCK_SIZE, 1844 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1845 .template_ablkcipher = { 1846 .setkey = ablkcipher_setkey, 1847 .encrypt = ablkcipher_encrypt, 1848 .decrypt = ablkcipher_decrypt, 1849 .givencrypt = ablkcipher_givencrypt, 1850 .geniv = "<built-in>", 1851 .min_keysize = AES_MIN_KEY_SIZE, 1852 .max_keysize = AES_MAX_KEY_SIZE, 1853 .ivsize = AES_BLOCK_SIZE, 1854 }, 1855 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1856 }, 1857 { 1858 .name = "cbc(des3_ede)", 1859 .driver_name = "cbc-3des-caam", 1860 .blocksize = DES3_EDE_BLOCK_SIZE, 1861 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1862 .template_ablkcipher = { 1863 .setkey = ablkcipher_setkey, 1864 .encrypt = ablkcipher_encrypt, 1865 .decrypt = ablkcipher_decrypt, 1866 .givencrypt = ablkcipher_givencrypt, 1867 .geniv = "<built-in>", 1868 .min_keysize = DES3_EDE_KEY_SIZE, 1869 .max_keysize = DES3_EDE_KEY_SIZE, 1870 .ivsize = DES3_EDE_BLOCK_SIZE, 1871 }, 1872 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 1873 }, 1874 { 1875 .name = "cbc(des)", 1876 .driver_name = "cbc-des-caam", 1877 .blocksize = DES_BLOCK_SIZE, 1878 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1879 .template_ablkcipher = { 1880 .setkey = ablkcipher_setkey, 1881 .encrypt = ablkcipher_encrypt, 1882 .decrypt = ablkcipher_decrypt, 1883 .givencrypt = ablkcipher_givencrypt, 1884 .geniv = "<built-in>", 1885 .min_keysize = DES_KEY_SIZE, 1886 .max_keysize = DES_KEY_SIZE, 1887 .ivsize = DES_BLOCK_SIZE, 1888 }, 1889 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 1890 }, 1891 { 1892 .name = "ctr(aes)", 1893 .driver_name = "ctr-aes-caam", 1894 .blocksize = 1, 1895 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1896 .template_ablkcipher = { 1897 .setkey = ablkcipher_setkey, 1898 .encrypt = ablkcipher_encrypt, 1899 .decrypt = ablkcipher_decrypt, 1900 .geniv = "chainiv", 1901 .min_keysize = AES_MIN_KEY_SIZE, 1902 .max_keysize = AES_MAX_KEY_SIZE, 1903 .ivsize = AES_BLOCK_SIZE, 1904 }, 1905 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128, 1906 }, 1907 { 1908 .name = "rfc3686(ctr(aes))", 1909 .driver_name = "rfc3686-ctr-aes-caam", 1910 .blocksize = 1, 1911 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1912 .template_ablkcipher = { 1913 .setkey = ablkcipher_setkey, 1914 .encrypt = ablkcipher_encrypt, 1915 .decrypt = ablkcipher_decrypt, 1916 .givencrypt = ablkcipher_givencrypt, 1917 .geniv = "<built-in>", 1918 .min_keysize = AES_MIN_KEY_SIZE + 1919 CTR_RFC3686_NONCE_SIZE, 1920 .max_keysize = AES_MAX_KEY_SIZE + 1921 CTR_RFC3686_NONCE_SIZE, 1922 .ivsize = CTR_RFC3686_IV_SIZE, 1923 }, 1924 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128, 1925 }, 1926 { 1927 .name = "xts(aes)", 1928 .driver_name = "xts-aes-caam", 1929 .blocksize = AES_BLOCK_SIZE, 1930 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1931 .template_ablkcipher = { 1932 .setkey = xts_ablkcipher_setkey, 1933 .encrypt = ablkcipher_encrypt, 1934 .decrypt = ablkcipher_decrypt, 1935 .geniv = "eseqiv", 1936 .min_keysize = 2 * AES_MIN_KEY_SIZE, 1937 .max_keysize = 2 * AES_MAX_KEY_SIZE, 1938 .ivsize = AES_BLOCK_SIZE, 1939 }, 1940 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS, 1941 }, 1942 }; 1943 1944 static struct caam_aead_alg driver_aeads[] = { 1945 { 1946 .aead = { 1947 .base = { 1948 .cra_name = "rfc4106(gcm(aes))", 1949 .cra_driver_name = "rfc4106-gcm-aes-caam", 1950 .cra_blocksize = 1, 1951 }, 1952 .setkey = rfc4106_setkey, 1953 .setauthsize = rfc4106_setauthsize, 1954 .encrypt = ipsec_gcm_encrypt, 1955 .decrypt = ipsec_gcm_decrypt, 1956 .ivsize = GCM_RFC4106_IV_SIZE, 1957 .maxauthsize = AES_BLOCK_SIZE, 1958 }, 1959 .caam = { 1960 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 1961 }, 1962 }, 1963 { 1964 .aead = { 1965 .base = { 1966 .cra_name = "rfc4543(gcm(aes))", 1967 .cra_driver_name = "rfc4543-gcm-aes-caam", 1968 .cra_blocksize = 1, 1969 }, 1970 .setkey = rfc4543_setkey, 1971 .setauthsize = rfc4543_setauthsize, 1972 .encrypt = ipsec_gcm_encrypt, 1973 .decrypt = ipsec_gcm_decrypt, 1974 .ivsize = GCM_RFC4543_IV_SIZE, 1975 .maxauthsize = AES_BLOCK_SIZE, 1976 }, 1977 .caam = { 1978 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 1979 }, 1980 }, 1981 /* Galois Counter Mode */ 1982 { 1983 .aead = { 1984 .base = { 1985 .cra_name = "gcm(aes)", 1986 .cra_driver_name = "gcm-aes-caam", 1987 .cra_blocksize = 1, 1988 }, 1989 .setkey = gcm_setkey, 1990 .setauthsize = gcm_setauthsize, 1991 .encrypt = gcm_encrypt, 1992 .decrypt = gcm_decrypt, 1993 .ivsize = GCM_AES_IV_SIZE, 1994 .maxauthsize = AES_BLOCK_SIZE, 1995 }, 1996 .caam = { 1997 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 1998 }, 1999 }, 2000 /* single-pass ipsec_esp descriptor */ 2001 { 2002 .aead = { 2003 .base = { 2004 .cra_name = "authenc(hmac(md5)," 2005 "ecb(cipher_null))", 2006 .cra_driver_name = "authenc-hmac-md5-" 2007 "ecb-cipher_null-caam", 2008 .cra_blocksize = NULL_BLOCK_SIZE, 2009 }, 2010 .setkey = aead_setkey, 2011 .setauthsize = aead_setauthsize, 2012 .encrypt = aead_encrypt, 2013 .decrypt = aead_decrypt, 2014 .ivsize = NULL_IV_SIZE, 2015 .maxauthsize = MD5_DIGEST_SIZE, 2016 }, 2017 .caam = { 2018 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2019 OP_ALG_AAI_HMAC_PRECOMP, 2020 }, 2021 }, 2022 { 2023 .aead = { 2024 .base = { 2025 .cra_name = "authenc(hmac(sha1)," 2026 "ecb(cipher_null))", 2027 .cra_driver_name = "authenc-hmac-sha1-" 2028 "ecb-cipher_null-caam", 2029 .cra_blocksize = NULL_BLOCK_SIZE, 2030 }, 2031 .setkey = aead_setkey, 2032 .setauthsize = aead_setauthsize, 2033 .encrypt = aead_encrypt, 2034 .decrypt = aead_decrypt, 2035 .ivsize = NULL_IV_SIZE, 2036 .maxauthsize = SHA1_DIGEST_SIZE, 2037 }, 2038 .caam = { 2039 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2040 OP_ALG_AAI_HMAC_PRECOMP, 2041 }, 2042 }, 2043 { 2044 .aead = { 2045 .base = { 2046 .cra_name = "authenc(hmac(sha224)," 2047 "ecb(cipher_null))", 2048 .cra_driver_name = "authenc-hmac-sha224-" 2049 "ecb-cipher_null-caam", 2050 .cra_blocksize = NULL_BLOCK_SIZE, 2051 }, 2052 .setkey = aead_setkey, 2053 .setauthsize = aead_setauthsize, 2054 .encrypt = aead_encrypt, 2055 .decrypt = aead_decrypt, 2056 .ivsize = NULL_IV_SIZE, 2057 .maxauthsize = SHA224_DIGEST_SIZE, 2058 }, 2059 .caam = { 2060 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2061 OP_ALG_AAI_HMAC_PRECOMP, 2062 }, 2063 }, 2064 { 2065 .aead = { 2066 .base = { 2067 .cra_name = "authenc(hmac(sha256)," 2068 "ecb(cipher_null))", 2069 .cra_driver_name = "authenc-hmac-sha256-" 2070 "ecb-cipher_null-caam", 2071 .cra_blocksize = NULL_BLOCK_SIZE, 2072 }, 2073 .setkey = aead_setkey, 2074 .setauthsize = aead_setauthsize, 2075 .encrypt = aead_encrypt, 2076 .decrypt = aead_decrypt, 2077 .ivsize = NULL_IV_SIZE, 2078 .maxauthsize = SHA256_DIGEST_SIZE, 2079 }, 2080 .caam = { 2081 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2082 OP_ALG_AAI_HMAC_PRECOMP, 2083 }, 2084 }, 2085 { 2086 .aead = { 2087 .base = { 2088 .cra_name = "authenc(hmac(sha384)," 2089 "ecb(cipher_null))", 2090 .cra_driver_name = "authenc-hmac-sha384-" 2091 "ecb-cipher_null-caam", 2092 .cra_blocksize = NULL_BLOCK_SIZE, 2093 }, 2094 .setkey = aead_setkey, 2095 .setauthsize = aead_setauthsize, 2096 .encrypt = aead_encrypt, 2097 .decrypt = aead_decrypt, 2098 .ivsize = NULL_IV_SIZE, 2099 .maxauthsize = SHA384_DIGEST_SIZE, 2100 }, 2101 .caam = { 2102 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2103 OP_ALG_AAI_HMAC_PRECOMP, 2104 }, 2105 }, 2106 { 2107 .aead = { 2108 .base = { 2109 .cra_name = "authenc(hmac(sha512)," 2110 "ecb(cipher_null))", 2111 .cra_driver_name = "authenc-hmac-sha512-" 2112 "ecb-cipher_null-caam", 2113 .cra_blocksize = NULL_BLOCK_SIZE, 2114 }, 2115 .setkey = aead_setkey, 2116 .setauthsize = aead_setauthsize, 2117 .encrypt = aead_encrypt, 2118 .decrypt = aead_decrypt, 2119 .ivsize = NULL_IV_SIZE, 2120 .maxauthsize = SHA512_DIGEST_SIZE, 2121 }, 2122 .caam = { 2123 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2124 OP_ALG_AAI_HMAC_PRECOMP, 2125 }, 2126 }, 2127 { 2128 .aead = { 2129 .base = { 2130 .cra_name = "authenc(hmac(md5),cbc(aes))", 2131 .cra_driver_name = "authenc-hmac-md5-" 2132 "cbc-aes-caam", 2133 .cra_blocksize = AES_BLOCK_SIZE, 2134 }, 2135 .setkey = aead_setkey, 2136 .setauthsize = aead_setauthsize, 2137 .encrypt = aead_encrypt, 2138 .decrypt = aead_decrypt, 2139 .ivsize = AES_BLOCK_SIZE, 2140 .maxauthsize = MD5_DIGEST_SIZE, 2141 }, 2142 .caam = { 2143 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2144 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2145 OP_ALG_AAI_HMAC_PRECOMP, 2146 }, 2147 }, 2148 { 2149 .aead = { 2150 .base = { 2151 .cra_name = "echainiv(authenc(hmac(md5)," 2152 "cbc(aes)))", 2153 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2154 "cbc-aes-caam", 2155 .cra_blocksize = AES_BLOCK_SIZE, 2156 }, 2157 .setkey = aead_setkey, 2158 .setauthsize = aead_setauthsize, 2159 .encrypt = aead_encrypt, 2160 .decrypt = aead_decrypt, 2161 .ivsize = AES_BLOCK_SIZE, 2162 .maxauthsize = MD5_DIGEST_SIZE, 2163 }, 2164 .caam = { 2165 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2166 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2167 OP_ALG_AAI_HMAC_PRECOMP, 2168 .geniv = true, 2169 }, 2170 }, 2171 { 2172 .aead = { 2173 .base = { 2174 .cra_name = "authenc(hmac(sha1),cbc(aes))", 2175 .cra_driver_name = "authenc-hmac-sha1-" 2176 "cbc-aes-caam", 2177 .cra_blocksize = AES_BLOCK_SIZE, 2178 }, 2179 .setkey = aead_setkey, 2180 .setauthsize = aead_setauthsize, 2181 .encrypt = aead_encrypt, 2182 .decrypt = aead_decrypt, 2183 .ivsize = AES_BLOCK_SIZE, 2184 .maxauthsize = SHA1_DIGEST_SIZE, 2185 }, 2186 .caam = { 2187 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2188 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2189 OP_ALG_AAI_HMAC_PRECOMP, 2190 }, 2191 }, 2192 { 2193 .aead = { 2194 .base = { 2195 .cra_name = "echainiv(authenc(hmac(sha1)," 2196 "cbc(aes)))", 2197 .cra_driver_name = "echainiv-authenc-" 2198 "hmac-sha1-cbc-aes-caam", 2199 .cra_blocksize = AES_BLOCK_SIZE, 2200 }, 2201 .setkey = aead_setkey, 2202 .setauthsize = aead_setauthsize, 2203 .encrypt = aead_encrypt, 2204 .decrypt = aead_decrypt, 2205 .ivsize = AES_BLOCK_SIZE, 2206 .maxauthsize = SHA1_DIGEST_SIZE, 2207 }, 2208 .caam = { 2209 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2210 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2211 OP_ALG_AAI_HMAC_PRECOMP, 2212 .geniv = true, 2213 }, 2214 }, 2215 { 2216 .aead = { 2217 .base = { 2218 .cra_name = "authenc(hmac(sha224),cbc(aes))", 2219 .cra_driver_name = "authenc-hmac-sha224-" 2220 "cbc-aes-caam", 2221 .cra_blocksize = AES_BLOCK_SIZE, 2222 }, 2223 .setkey = aead_setkey, 2224 .setauthsize = aead_setauthsize, 2225 .encrypt = aead_encrypt, 2226 .decrypt = aead_decrypt, 2227 .ivsize = AES_BLOCK_SIZE, 2228 .maxauthsize = SHA224_DIGEST_SIZE, 2229 }, 2230 .caam = { 2231 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2232 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2233 OP_ALG_AAI_HMAC_PRECOMP, 2234 }, 2235 }, 2236 { 2237 .aead = { 2238 .base = { 2239 .cra_name = "echainiv(authenc(hmac(sha224)," 2240 "cbc(aes)))", 2241 .cra_driver_name = "echainiv-authenc-" 2242 "hmac-sha224-cbc-aes-caam", 2243 .cra_blocksize = AES_BLOCK_SIZE, 2244 }, 2245 .setkey = aead_setkey, 2246 .setauthsize = aead_setauthsize, 2247 .encrypt = aead_encrypt, 2248 .decrypt = aead_decrypt, 2249 .ivsize = AES_BLOCK_SIZE, 2250 .maxauthsize = SHA224_DIGEST_SIZE, 2251 }, 2252 .caam = { 2253 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2254 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2255 OP_ALG_AAI_HMAC_PRECOMP, 2256 .geniv = true, 2257 }, 2258 }, 2259 { 2260 .aead = { 2261 .base = { 2262 .cra_name = "authenc(hmac(sha256),cbc(aes))", 2263 .cra_driver_name = "authenc-hmac-sha256-" 2264 "cbc-aes-caam", 2265 .cra_blocksize = AES_BLOCK_SIZE, 2266 }, 2267 .setkey = aead_setkey, 2268 .setauthsize = aead_setauthsize, 2269 .encrypt = aead_encrypt, 2270 .decrypt = aead_decrypt, 2271 .ivsize = AES_BLOCK_SIZE, 2272 .maxauthsize = SHA256_DIGEST_SIZE, 2273 }, 2274 .caam = { 2275 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2276 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2277 OP_ALG_AAI_HMAC_PRECOMP, 2278 }, 2279 }, 2280 { 2281 .aead = { 2282 .base = { 2283 .cra_name = "echainiv(authenc(hmac(sha256)," 2284 "cbc(aes)))", 2285 .cra_driver_name = "echainiv-authenc-" 2286 "hmac-sha256-cbc-aes-caam", 2287 .cra_blocksize = AES_BLOCK_SIZE, 2288 }, 2289 .setkey = aead_setkey, 2290 .setauthsize = aead_setauthsize, 2291 .encrypt = aead_encrypt, 2292 .decrypt = aead_decrypt, 2293 .ivsize = AES_BLOCK_SIZE, 2294 .maxauthsize = SHA256_DIGEST_SIZE, 2295 }, 2296 .caam = { 2297 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2298 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2299 OP_ALG_AAI_HMAC_PRECOMP, 2300 .geniv = true, 2301 }, 2302 }, 2303 { 2304 .aead = { 2305 .base = { 2306 .cra_name = "authenc(hmac(sha384),cbc(aes))", 2307 .cra_driver_name = "authenc-hmac-sha384-" 2308 "cbc-aes-caam", 2309 .cra_blocksize = AES_BLOCK_SIZE, 2310 }, 2311 .setkey = aead_setkey, 2312 .setauthsize = aead_setauthsize, 2313 .encrypt = aead_encrypt, 2314 .decrypt = aead_decrypt, 2315 .ivsize = AES_BLOCK_SIZE, 2316 .maxauthsize = SHA384_DIGEST_SIZE, 2317 }, 2318 .caam = { 2319 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2320 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2321 OP_ALG_AAI_HMAC_PRECOMP, 2322 }, 2323 }, 2324 { 2325 .aead = { 2326 .base = { 2327 .cra_name = "echainiv(authenc(hmac(sha384)," 2328 "cbc(aes)))", 2329 .cra_driver_name = "echainiv-authenc-" 2330 "hmac-sha384-cbc-aes-caam", 2331 .cra_blocksize = AES_BLOCK_SIZE, 2332 }, 2333 .setkey = aead_setkey, 2334 .setauthsize = aead_setauthsize, 2335 .encrypt = aead_encrypt, 2336 .decrypt = aead_decrypt, 2337 .ivsize = AES_BLOCK_SIZE, 2338 .maxauthsize = SHA384_DIGEST_SIZE, 2339 }, 2340 .caam = { 2341 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2342 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2343 OP_ALG_AAI_HMAC_PRECOMP, 2344 .geniv = true, 2345 }, 2346 }, 2347 { 2348 .aead = { 2349 .base = { 2350 .cra_name = "authenc(hmac(sha512),cbc(aes))", 2351 .cra_driver_name = "authenc-hmac-sha512-" 2352 "cbc-aes-caam", 2353 .cra_blocksize = AES_BLOCK_SIZE, 2354 }, 2355 .setkey = aead_setkey, 2356 .setauthsize = aead_setauthsize, 2357 .encrypt = aead_encrypt, 2358 .decrypt = aead_decrypt, 2359 .ivsize = AES_BLOCK_SIZE, 2360 .maxauthsize = SHA512_DIGEST_SIZE, 2361 }, 2362 .caam = { 2363 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2364 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2365 OP_ALG_AAI_HMAC_PRECOMP, 2366 }, 2367 }, 2368 { 2369 .aead = { 2370 .base = { 2371 .cra_name = "echainiv(authenc(hmac(sha512)," 2372 "cbc(aes)))", 2373 .cra_driver_name = "echainiv-authenc-" 2374 "hmac-sha512-cbc-aes-caam", 2375 .cra_blocksize = AES_BLOCK_SIZE, 2376 }, 2377 .setkey = aead_setkey, 2378 .setauthsize = aead_setauthsize, 2379 .encrypt = aead_encrypt, 2380 .decrypt = aead_decrypt, 2381 .ivsize = AES_BLOCK_SIZE, 2382 .maxauthsize = SHA512_DIGEST_SIZE, 2383 }, 2384 .caam = { 2385 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2386 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2387 OP_ALG_AAI_HMAC_PRECOMP, 2388 .geniv = true, 2389 }, 2390 }, 2391 { 2392 .aead = { 2393 .base = { 2394 .cra_name = "authenc(hmac(md5),cbc(des3_ede))", 2395 .cra_driver_name = "authenc-hmac-md5-" 2396 "cbc-des3_ede-caam", 2397 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2398 }, 2399 .setkey = aead_setkey, 2400 .setauthsize = aead_setauthsize, 2401 .encrypt = aead_encrypt, 2402 .decrypt = aead_decrypt, 2403 .ivsize = DES3_EDE_BLOCK_SIZE, 2404 .maxauthsize = MD5_DIGEST_SIZE, 2405 }, 2406 .caam = { 2407 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2408 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2409 OP_ALG_AAI_HMAC_PRECOMP, 2410 } 2411 }, 2412 { 2413 .aead = { 2414 .base = { 2415 .cra_name = "echainiv(authenc(hmac(md5)," 2416 "cbc(des3_ede)))", 2417 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2418 "cbc-des3_ede-caam", 2419 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2420 }, 2421 .setkey = aead_setkey, 2422 .setauthsize = aead_setauthsize, 2423 .encrypt = aead_encrypt, 2424 .decrypt = aead_decrypt, 2425 .ivsize = DES3_EDE_BLOCK_SIZE, 2426 .maxauthsize = MD5_DIGEST_SIZE, 2427 }, 2428 .caam = { 2429 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2430 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2431 OP_ALG_AAI_HMAC_PRECOMP, 2432 .geniv = true, 2433 } 2434 }, 2435 { 2436 .aead = { 2437 .base = { 2438 .cra_name = "authenc(hmac(sha1)," 2439 "cbc(des3_ede))", 2440 .cra_driver_name = "authenc-hmac-sha1-" 2441 "cbc-des3_ede-caam", 2442 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2443 }, 2444 .setkey = aead_setkey, 2445 .setauthsize = aead_setauthsize, 2446 .encrypt = aead_encrypt, 2447 .decrypt = aead_decrypt, 2448 .ivsize = DES3_EDE_BLOCK_SIZE, 2449 .maxauthsize = SHA1_DIGEST_SIZE, 2450 }, 2451 .caam = { 2452 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2453 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2454 OP_ALG_AAI_HMAC_PRECOMP, 2455 }, 2456 }, 2457 { 2458 .aead = { 2459 .base = { 2460 .cra_name = "echainiv(authenc(hmac(sha1)," 2461 "cbc(des3_ede)))", 2462 .cra_driver_name = "echainiv-authenc-" 2463 "hmac-sha1-" 2464 "cbc-des3_ede-caam", 2465 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2466 }, 2467 .setkey = aead_setkey, 2468 .setauthsize = aead_setauthsize, 2469 .encrypt = aead_encrypt, 2470 .decrypt = aead_decrypt, 2471 .ivsize = DES3_EDE_BLOCK_SIZE, 2472 .maxauthsize = SHA1_DIGEST_SIZE, 2473 }, 2474 .caam = { 2475 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2476 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2477 OP_ALG_AAI_HMAC_PRECOMP, 2478 .geniv = true, 2479 }, 2480 }, 2481 { 2482 .aead = { 2483 .base = { 2484 .cra_name = "authenc(hmac(sha224)," 2485 "cbc(des3_ede))", 2486 .cra_driver_name = "authenc-hmac-sha224-" 2487 "cbc-des3_ede-caam", 2488 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2489 }, 2490 .setkey = aead_setkey, 2491 .setauthsize = aead_setauthsize, 2492 .encrypt = aead_encrypt, 2493 .decrypt = aead_decrypt, 2494 .ivsize = DES3_EDE_BLOCK_SIZE, 2495 .maxauthsize = SHA224_DIGEST_SIZE, 2496 }, 2497 .caam = { 2498 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2499 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2500 OP_ALG_AAI_HMAC_PRECOMP, 2501 }, 2502 }, 2503 { 2504 .aead = { 2505 .base = { 2506 .cra_name = "echainiv(authenc(hmac(sha224)," 2507 "cbc(des3_ede)))", 2508 .cra_driver_name = "echainiv-authenc-" 2509 "hmac-sha224-" 2510 "cbc-des3_ede-caam", 2511 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2512 }, 2513 .setkey = aead_setkey, 2514 .setauthsize = aead_setauthsize, 2515 .encrypt = aead_encrypt, 2516 .decrypt = aead_decrypt, 2517 .ivsize = DES3_EDE_BLOCK_SIZE, 2518 .maxauthsize = SHA224_DIGEST_SIZE, 2519 }, 2520 .caam = { 2521 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2522 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2523 OP_ALG_AAI_HMAC_PRECOMP, 2524 .geniv = true, 2525 }, 2526 }, 2527 { 2528 .aead = { 2529 .base = { 2530 .cra_name = "authenc(hmac(sha256)," 2531 "cbc(des3_ede))", 2532 .cra_driver_name = "authenc-hmac-sha256-" 2533 "cbc-des3_ede-caam", 2534 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2535 }, 2536 .setkey = aead_setkey, 2537 .setauthsize = aead_setauthsize, 2538 .encrypt = aead_encrypt, 2539 .decrypt = aead_decrypt, 2540 .ivsize = DES3_EDE_BLOCK_SIZE, 2541 .maxauthsize = SHA256_DIGEST_SIZE, 2542 }, 2543 .caam = { 2544 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2545 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2546 OP_ALG_AAI_HMAC_PRECOMP, 2547 }, 2548 }, 2549 { 2550 .aead = { 2551 .base = { 2552 .cra_name = "echainiv(authenc(hmac(sha256)," 2553 "cbc(des3_ede)))", 2554 .cra_driver_name = "echainiv-authenc-" 2555 "hmac-sha256-" 2556 "cbc-des3_ede-caam", 2557 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2558 }, 2559 .setkey = aead_setkey, 2560 .setauthsize = aead_setauthsize, 2561 .encrypt = aead_encrypt, 2562 .decrypt = aead_decrypt, 2563 .ivsize = DES3_EDE_BLOCK_SIZE, 2564 .maxauthsize = SHA256_DIGEST_SIZE, 2565 }, 2566 .caam = { 2567 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2568 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2569 OP_ALG_AAI_HMAC_PRECOMP, 2570 .geniv = true, 2571 }, 2572 }, 2573 { 2574 .aead = { 2575 .base = { 2576 .cra_name = "authenc(hmac(sha384)," 2577 "cbc(des3_ede))", 2578 .cra_driver_name = "authenc-hmac-sha384-" 2579 "cbc-des3_ede-caam", 2580 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2581 }, 2582 .setkey = aead_setkey, 2583 .setauthsize = aead_setauthsize, 2584 .encrypt = aead_encrypt, 2585 .decrypt = aead_decrypt, 2586 .ivsize = DES3_EDE_BLOCK_SIZE, 2587 .maxauthsize = SHA384_DIGEST_SIZE, 2588 }, 2589 .caam = { 2590 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2591 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2592 OP_ALG_AAI_HMAC_PRECOMP, 2593 }, 2594 }, 2595 { 2596 .aead = { 2597 .base = { 2598 .cra_name = "echainiv(authenc(hmac(sha384)," 2599 "cbc(des3_ede)))", 2600 .cra_driver_name = "echainiv-authenc-" 2601 "hmac-sha384-" 2602 "cbc-des3_ede-caam", 2603 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2604 }, 2605 .setkey = aead_setkey, 2606 .setauthsize = aead_setauthsize, 2607 .encrypt = aead_encrypt, 2608 .decrypt = aead_decrypt, 2609 .ivsize = DES3_EDE_BLOCK_SIZE, 2610 .maxauthsize = SHA384_DIGEST_SIZE, 2611 }, 2612 .caam = { 2613 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2614 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2615 OP_ALG_AAI_HMAC_PRECOMP, 2616 .geniv = true, 2617 }, 2618 }, 2619 { 2620 .aead = { 2621 .base = { 2622 .cra_name = "authenc(hmac(sha512)," 2623 "cbc(des3_ede))", 2624 .cra_driver_name = "authenc-hmac-sha512-" 2625 "cbc-des3_ede-caam", 2626 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2627 }, 2628 .setkey = aead_setkey, 2629 .setauthsize = aead_setauthsize, 2630 .encrypt = aead_encrypt, 2631 .decrypt = aead_decrypt, 2632 .ivsize = DES3_EDE_BLOCK_SIZE, 2633 .maxauthsize = SHA512_DIGEST_SIZE, 2634 }, 2635 .caam = { 2636 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2637 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2638 OP_ALG_AAI_HMAC_PRECOMP, 2639 }, 2640 }, 2641 { 2642 .aead = { 2643 .base = { 2644 .cra_name = "echainiv(authenc(hmac(sha512)," 2645 "cbc(des3_ede)))", 2646 .cra_driver_name = "echainiv-authenc-" 2647 "hmac-sha512-" 2648 "cbc-des3_ede-caam", 2649 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2650 }, 2651 .setkey = aead_setkey, 2652 .setauthsize = aead_setauthsize, 2653 .encrypt = aead_encrypt, 2654 .decrypt = aead_decrypt, 2655 .ivsize = DES3_EDE_BLOCK_SIZE, 2656 .maxauthsize = SHA512_DIGEST_SIZE, 2657 }, 2658 .caam = { 2659 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2660 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2661 OP_ALG_AAI_HMAC_PRECOMP, 2662 .geniv = true, 2663 }, 2664 }, 2665 { 2666 .aead = { 2667 .base = { 2668 .cra_name = "authenc(hmac(md5),cbc(des))", 2669 .cra_driver_name = "authenc-hmac-md5-" 2670 "cbc-des-caam", 2671 .cra_blocksize = DES_BLOCK_SIZE, 2672 }, 2673 .setkey = aead_setkey, 2674 .setauthsize = aead_setauthsize, 2675 .encrypt = aead_encrypt, 2676 .decrypt = aead_decrypt, 2677 .ivsize = DES_BLOCK_SIZE, 2678 .maxauthsize = MD5_DIGEST_SIZE, 2679 }, 2680 .caam = { 2681 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2682 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2683 OP_ALG_AAI_HMAC_PRECOMP, 2684 }, 2685 }, 2686 { 2687 .aead = { 2688 .base = { 2689 .cra_name = "echainiv(authenc(hmac(md5)," 2690 "cbc(des)))", 2691 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2692 "cbc-des-caam", 2693 .cra_blocksize = DES_BLOCK_SIZE, 2694 }, 2695 .setkey = aead_setkey, 2696 .setauthsize = aead_setauthsize, 2697 .encrypt = aead_encrypt, 2698 .decrypt = aead_decrypt, 2699 .ivsize = DES_BLOCK_SIZE, 2700 .maxauthsize = MD5_DIGEST_SIZE, 2701 }, 2702 .caam = { 2703 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2704 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2705 OP_ALG_AAI_HMAC_PRECOMP, 2706 .geniv = true, 2707 }, 2708 }, 2709 { 2710 .aead = { 2711 .base = { 2712 .cra_name = "authenc(hmac(sha1),cbc(des))", 2713 .cra_driver_name = "authenc-hmac-sha1-" 2714 "cbc-des-caam", 2715 .cra_blocksize = DES_BLOCK_SIZE, 2716 }, 2717 .setkey = aead_setkey, 2718 .setauthsize = aead_setauthsize, 2719 .encrypt = aead_encrypt, 2720 .decrypt = aead_decrypt, 2721 .ivsize = DES_BLOCK_SIZE, 2722 .maxauthsize = SHA1_DIGEST_SIZE, 2723 }, 2724 .caam = { 2725 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2726 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2727 OP_ALG_AAI_HMAC_PRECOMP, 2728 }, 2729 }, 2730 { 2731 .aead = { 2732 .base = { 2733 .cra_name = "echainiv(authenc(hmac(sha1)," 2734 "cbc(des)))", 2735 .cra_driver_name = "echainiv-authenc-" 2736 "hmac-sha1-cbc-des-caam", 2737 .cra_blocksize = DES_BLOCK_SIZE, 2738 }, 2739 .setkey = aead_setkey, 2740 .setauthsize = aead_setauthsize, 2741 .encrypt = aead_encrypt, 2742 .decrypt = aead_decrypt, 2743 .ivsize = DES_BLOCK_SIZE, 2744 .maxauthsize = SHA1_DIGEST_SIZE, 2745 }, 2746 .caam = { 2747 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2748 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2749 OP_ALG_AAI_HMAC_PRECOMP, 2750 .geniv = true, 2751 }, 2752 }, 2753 { 2754 .aead = { 2755 .base = { 2756 .cra_name = "authenc(hmac(sha224),cbc(des))", 2757 .cra_driver_name = "authenc-hmac-sha224-" 2758 "cbc-des-caam", 2759 .cra_blocksize = DES_BLOCK_SIZE, 2760 }, 2761 .setkey = aead_setkey, 2762 .setauthsize = aead_setauthsize, 2763 .encrypt = aead_encrypt, 2764 .decrypt = aead_decrypt, 2765 .ivsize = DES_BLOCK_SIZE, 2766 .maxauthsize = SHA224_DIGEST_SIZE, 2767 }, 2768 .caam = { 2769 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2770 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2771 OP_ALG_AAI_HMAC_PRECOMP, 2772 }, 2773 }, 2774 { 2775 .aead = { 2776 .base = { 2777 .cra_name = "echainiv(authenc(hmac(sha224)," 2778 "cbc(des)))", 2779 .cra_driver_name = "echainiv-authenc-" 2780 "hmac-sha224-cbc-des-caam", 2781 .cra_blocksize = DES_BLOCK_SIZE, 2782 }, 2783 .setkey = aead_setkey, 2784 .setauthsize = aead_setauthsize, 2785 .encrypt = aead_encrypt, 2786 .decrypt = aead_decrypt, 2787 .ivsize = DES_BLOCK_SIZE, 2788 .maxauthsize = SHA224_DIGEST_SIZE, 2789 }, 2790 .caam = { 2791 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2792 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2793 OP_ALG_AAI_HMAC_PRECOMP, 2794 .geniv = true, 2795 }, 2796 }, 2797 { 2798 .aead = { 2799 .base = { 2800 .cra_name = "authenc(hmac(sha256),cbc(des))", 2801 .cra_driver_name = "authenc-hmac-sha256-" 2802 "cbc-des-caam", 2803 .cra_blocksize = DES_BLOCK_SIZE, 2804 }, 2805 .setkey = aead_setkey, 2806 .setauthsize = aead_setauthsize, 2807 .encrypt = aead_encrypt, 2808 .decrypt = aead_decrypt, 2809 .ivsize = DES_BLOCK_SIZE, 2810 .maxauthsize = SHA256_DIGEST_SIZE, 2811 }, 2812 .caam = { 2813 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2814 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2815 OP_ALG_AAI_HMAC_PRECOMP, 2816 }, 2817 }, 2818 { 2819 .aead = { 2820 .base = { 2821 .cra_name = "echainiv(authenc(hmac(sha256)," 2822 "cbc(des)))", 2823 .cra_driver_name = "echainiv-authenc-" 2824 "hmac-sha256-cbc-des-caam", 2825 .cra_blocksize = DES_BLOCK_SIZE, 2826 }, 2827 .setkey = aead_setkey, 2828 .setauthsize = aead_setauthsize, 2829 .encrypt = aead_encrypt, 2830 .decrypt = aead_decrypt, 2831 .ivsize = DES_BLOCK_SIZE, 2832 .maxauthsize = SHA256_DIGEST_SIZE, 2833 }, 2834 .caam = { 2835 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2836 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2837 OP_ALG_AAI_HMAC_PRECOMP, 2838 .geniv = true, 2839 }, 2840 }, 2841 { 2842 .aead = { 2843 .base = { 2844 .cra_name = "authenc(hmac(sha384),cbc(des))", 2845 .cra_driver_name = "authenc-hmac-sha384-" 2846 "cbc-des-caam", 2847 .cra_blocksize = DES_BLOCK_SIZE, 2848 }, 2849 .setkey = aead_setkey, 2850 .setauthsize = aead_setauthsize, 2851 .encrypt = aead_encrypt, 2852 .decrypt = aead_decrypt, 2853 .ivsize = DES_BLOCK_SIZE, 2854 .maxauthsize = SHA384_DIGEST_SIZE, 2855 }, 2856 .caam = { 2857 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2858 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2859 OP_ALG_AAI_HMAC_PRECOMP, 2860 }, 2861 }, 2862 { 2863 .aead = { 2864 .base = { 2865 .cra_name = "echainiv(authenc(hmac(sha384)," 2866 "cbc(des)))", 2867 .cra_driver_name = "echainiv-authenc-" 2868 "hmac-sha384-cbc-des-caam", 2869 .cra_blocksize = DES_BLOCK_SIZE, 2870 }, 2871 .setkey = aead_setkey, 2872 .setauthsize = aead_setauthsize, 2873 .encrypt = aead_encrypt, 2874 .decrypt = aead_decrypt, 2875 .ivsize = DES_BLOCK_SIZE, 2876 .maxauthsize = SHA384_DIGEST_SIZE, 2877 }, 2878 .caam = { 2879 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2880 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2881 OP_ALG_AAI_HMAC_PRECOMP, 2882 .geniv = true, 2883 }, 2884 }, 2885 { 2886 .aead = { 2887 .base = { 2888 .cra_name = "authenc(hmac(sha512),cbc(des))", 2889 .cra_driver_name = "authenc-hmac-sha512-" 2890 "cbc-des-caam", 2891 .cra_blocksize = DES_BLOCK_SIZE, 2892 }, 2893 .setkey = aead_setkey, 2894 .setauthsize = aead_setauthsize, 2895 .encrypt = aead_encrypt, 2896 .decrypt = aead_decrypt, 2897 .ivsize = DES_BLOCK_SIZE, 2898 .maxauthsize = SHA512_DIGEST_SIZE, 2899 }, 2900 .caam = { 2901 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2902 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2903 OP_ALG_AAI_HMAC_PRECOMP, 2904 }, 2905 }, 2906 { 2907 .aead = { 2908 .base = { 2909 .cra_name = "echainiv(authenc(hmac(sha512)," 2910 "cbc(des)))", 2911 .cra_driver_name = "echainiv-authenc-" 2912 "hmac-sha512-cbc-des-caam", 2913 .cra_blocksize = DES_BLOCK_SIZE, 2914 }, 2915 .setkey = aead_setkey, 2916 .setauthsize = aead_setauthsize, 2917 .encrypt = aead_encrypt, 2918 .decrypt = aead_decrypt, 2919 .ivsize = DES_BLOCK_SIZE, 2920 .maxauthsize = SHA512_DIGEST_SIZE, 2921 }, 2922 .caam = { 2923 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2924 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2925 OP_ALG_AAI_HMAC_PRECOMP, 2926 .geniv = true, 2927 }, 2928 }, 2929 { 2930 .aead = { 2931 .base = { 2932 .cra_name = "authenc(hmac(md5)," 2933 "rfc3686(ctr(aes)))", 2934 .cra_driver_name = "authenc-hmac-md5-" 2935 "rfc3686-ctr-aes-caam", 2936 .cra_blocksize = 1, 2937 }, 2938 .setkey = aead_setkey, 2939 .setauthsize = aead_setauthsize, 2940 .encrypt = aead_encrypt, 2941 .decrypt = aead_decrypt, 2942 .ivsize = CTR_RFC3686_IV_SIZE, 2943 .maxauthsize = MD5_DIGEST_SIZE, 2944 }, 2945 .caam = { 2946 .class1_alg_type = OP_ALG_ALGSEL_AES | 2947 OP_ALG_AAI_CTR_MOD128, 2948 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2949 OP_ALG_AAI_HMAC_PRECOMP, 2950 .rfc3686 = true, 2951 }, 2952 }, 2953 { 2954 .aead = { 2955 .base = { 2956 .cra_name = "seqiv(authenc(" 2957 "hmac(md5),rfc3686(ctr(aes))))", 2958 .cra_driver_name = "seqiv-authenc-hmac-md5-" 2959 "rfc3686-ctr-aes-caam", 2960 .cra_blocksize = 1, 2961 }, 2962 .setkey = aead_setkey, 2963 .setauthsize = aead_setauthsize, 2964 .encrypt = aead_encrypt, 2965 .decrypt = aead_decrypt, 2966 .ivsize = CTR_RFC3686_IV_SIZE, 2967 .maxauthsize = MD5_DIGEST_SIZE, 2968 }, 2969 .caam = { 2970 .class1_alg_type = OP_ALG_ALGSEL_AES | 2971 OP_ALG_AAI_CTR_MOD128, 2972 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2973 OP_ALG_AAI_HMAC_PRECOMP, 2974 .rfc3686 = true, 2975 .geniv = true, 2976 }, 2977 }, 2978 { 2979 .aead = { 2980 .base = { 2981 .cra_name = "authenc(hmac(sha1)," 2982 "rfc3686(ctr(aes)))", 2983 .cra_driver_name = "authenc-hmac-sha1-" 2984 "rfc3686-ctr-aes-caam", 2985 .cra_blocksize = 1, 2986 }, 2987 .setkey = aead_setkey, 2988 .setauthsize = aead_setauthsize, 2989 .encrypt = aead_encrypt, 2990 .decrypt = aead_decrypt, 2991 .ivsize = CTR_RFC3686_IV_SIZE, 2992 .maxauthsize = SHA1_DIGEST_SIZE, 2993 }, 2994 .caam = { 2995 .class1_alg_type = OP_ALG_ALGSEL_AES | 2996 OP_ALG_AAI_CTR_MOD128, 2997 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2998 OP_ALG_AAI_HMAC_PRECOMP, 2999 .rfc3686 = true, 3000 }, 3001 }, 3002 { 3003 .aead = { 3004 .base = { 3005 .cra_name = "seqiv(authenc(" 3006 "hmac(sha1),rfc3686(ctr(aes))))", 3007 .cra_driver_name = "seqiv-authenc-hmac-sha1-" 3008 "rfc3686-ctr-aes-caam", 3009 .cra_blocksize = 1, 3010 }, 3011 .setkey = aead_setkey, 3012 .setauthsize = aead_setauthsize, 3013 .encrypt = aead_encrypt, 3014 .decrypt = aead_decrypt, 3015 .ivsize = CTR_RFC3686_IV_SIZE, 3016 .maxauthsize = SHA1_DIGEST_SIZE, 3017 }, 3018 .caam = { 3019 .class1_alg_type = OP_ALG_ALGSEL_AES | 3020 OP_ALG_AAI_CTR_MOD128, 3021 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3022 OP_ALG_AAI_HMAC_PRECOMP, 3023 .rfc3686 = true, 3024 .geniv = true, 3025 }, 3026 }, 3027 { 3028 .aead = { 3029 .base = { 3030 .cra_name = "authenc(hmac(sha224)," 3031 "rfc3686(ctr(aes)))", 3032 .cra_driver_name = "authenc-hmac-sha224-" 3033 "rfc3686-ctr-aes-caam", 3034 .cra_blocksize = 1, 3035 }, 3036 .setkey = aead_setkey, 3037 .setauthsize = aead_setauthsize, 3038 .encrypt = aead_encrypt, 3039 .decrypt = aead_decrypt, 3040 .ivsize = CTR_RFC3686_IV_SIZE, 3041 .maxauthsize = SHA224_DIGEST_SIZE, 3042 }, 3043 .caam = { 3044 .class1_alg_type = OP_ALG_ALGSEL_AES | 3045 OP_ALG_AAI_CTR_MOD128, 3046 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3047 OP_ALG_AAI_HMAC_PRECOMP, 3048 .rfc3686 = true, 3049 }, 3050 }, 3051 { 3052 .aead = { 3053 .base = { 3054 .cra_name = "seqiv(authenc(" 3055 "hmac(sha224),rfc3686(ctr(aes))))", 3056 .cra_driver_name = "seqiv-authenc-hmac-sha224-" 3057 "rfc3686-ctr-aes-caam", 3058 .cra_blocksize = 1, 3059 }, 3060 .setkey = aead_setkey, 3061 .setauthsize = aead_setauthsize, 3062 .encrypt = aead_encrypt, 3063 .decrypt = aead_decrypt, 3064 .ivsize = CTR_RFC3686_IV_SIZE, 3065 .maxauthsize = SHA224_DIGEST_SIZE, 3066 }, 3067 .caam = { 3068 .class1_alg_type = OP_ALG_ALGSEL_AES | 3069 OP_ALG_AAI_CTR_MOD128, 3070 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3071 OP_ALG_AAI_HMAC_PRECOMP, 3072 .rfc3686 = true, 3073 .geniv = true, 3074 }, 3075 }, 3076 { 3077 .aead = { 3078 .base = { 3079 .cra_name = "authenc(hmac(sha256)," 3080 "rfc3686(ctr(aes)))", 3081 .cra_driver_name = "authenc-hmac-sha256-" 3082 "rfc3686-ctr-aes-caam", 3083 .cra_blocksize = 1, 3084 }, 3085 .setkey = aead_setkey, 3086 .setauthsize = aead_setauthsize, 3087 .encrypt = aead_encrypt, 3088 .decrypt = aead_decrypt, 3089 .ivsize = CTR_RFC3686_IV_SIZE, 3090 .maxauthsize = SHA256_DIGEST_SIZE, 3091 }, 3092 .caam = { 3093 .class1_alg_type = OP_ALG_ALGSEL_AES | 3094 OP_ALG_AAI_CTR_MOD128, 3095 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3096 OP_ALG_AAI_HMAC_PRECOMP, 3097 .rfc3686 = true, 3098 }, 3099 }, 3100 { 3101 .aead = { 3102 .base = { 3103 .cra_name = "seqiv(authenc(hmac(sha256)," 3104 "rfc3686(ctr(aes))))", 3105 .cra_driver_name = "seqiv-authenc-hmac-sha256-" 3106 "rfc3686-ctr-aes-caam", 3107 .cra_blocksize = 1, 3108 }, 3109 .setkey = aead_setkey, 3110 .setauthsize = aead_setauthsize, 3111 .encrypt = aead_encrypt, 3112 .decrypt = aead_decrypt, 3113 .ivsize = CTR_RFC3686_IV_SIZE, 3114 .maxauthsize = SHA256_DIGEST_SIZE, 3115 }, 3116 .caam = { 3117 .class1_alg_type = OP_ALG_ALGSEL_AES | 3118 OP_ALG_AAI_CTR_MOD128, 3119 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3120 OP_ALG_AAI_HMAC_PRECOMP, 3121 .rfc3686 = true, 3122 .geniv = true, 3123 }, 3124 }, 3125 { 3126 .aead = { 3127 .base = { 3128 .cra_name = "authenc(hmac(sha384)," 3129 "rfc3686(ctr(aes)))", 3130 .cra_driver_name = "authenc-hmac-sha384-" 3131 "rfc3686-ctr-aes-caam", 3132 .cra_blocksize = 1, 3133 }, 3134 .setkey = aead_setkey, 3135 .setauthsize = aead_setauthsize, 3136 .encrypt = aead_encrypt, 3137 .decrypt = aead_decrypt, 3138 .ivsize = CTR_RFC3686_IV_SIZE, 3139 .maxauthsize = SHA384_DIGEST_SIZE, 3140 }, 3141 .caam = { 3142 .class1_alg_type = OP_ALG_ALGSEL_AES | 3143 OP_ALG_AAI_CTR_MOD128, 3144 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3145 OP_ALG_AAI_HMAC_PRECOMP, 3146 .rfc3686 = true, 3147 }, 3148 }, 3149 { 3150 .aead = { 3151 .base = { 3152 .cra_name = "seqiv(authenc(hmac(sha384)," 3153 "rfc3686(ctr(aes))))", 3154 .cra_driver_name = "seqiv-authenc-hmac-sha384-" 3155 "rfc3686-ctr-aes-caam", 3156 .cra_blocksize = 1, 3157 }, 3158 .setkey = aead_setkey, 3159 .setauthsize = aead_setauthsize, 3160 .encrypt = aead_encrypt, 3161 .decrypt = aead_decrypt, 3162 .ivsize = CTR_RFC3686_IV_SIZE, 3163 .maxauthsize = SHA384_DIGEST_SIZE, 3164 }, 3165 .caam = { 3166 .class1_alg_type = OP_ALG_ALGSEL_AES | 3167 OP_ALG_AAI_CTR_MOD128, 3168 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3169 OP_ALG_AAI_HMAC_PRECOMP, 3170 .rfc3686 = true, 3171 .geniv = true, 3172 }, 3173 }, 3174 { 3175 .aead = { 3176 .base = { 3177 .cra_name = "authenc(hmac(sha512)," 3178 "rfc3686(ctr(aes)))", 3179 .cra_driver_name = "authenc-hmac-sha512-" 3180 "rfc3686-ctr-aes-caam", 3181 .cra_blocksize = 1, 3182 }, 3183 .setkey = aead_setkey, 3184 .setauthsize = aead_setauthsize, 3185 .encrypt = aead_encrypt, 3186 .decrypt = aead_decrypt, 3187 .ivsize = CTR_RFC3686_IV_SIZE, 3188 .maxauthsize = SHA512_DIGEST_SIZE, 3189 }, 3190 .caam = { 3191 .class1_alg_type = OP_ALG_ALGSEL_AES | 3192 OP_ALG_AAI_CTR_MOD128, 3193 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3194 OP_ALG_AAI_HMAC_PRECOMP, 3195 .rfc3686 = true, 3196 }, 3197 }, 3198 { 3199 .aead = { 3200 .base = { 3201 .cra_name = "seqiv(authenc(hmac(sha512)," 3202 "rfc3686(ctr(aes))))", 3203 .cra_driver_name = "seqiv-authenc-hmac-sha512-" 3204 "rfc3686-ctr-aes-caam", 3205 .cra_blocksize = 1, 3206 }, 3207 .setkey = aead_setkey, 3208 .setauthsize = aead_setauthsize, 3209 .encrypt = aead_encrypt, 3210 .decrypt = aead_decrypt, 3211 .ivsize = CTR_RFC3686_IV_SIZE, 3212 .maxauthsize = SHA512_DIGEST_SIZE, 3213 }, 3214 .caam = { 3215 .class1_alg_type = OP_ALG_ALGSEL_AES | 3216 OP_ALG_AAI_CTR_MOD128, 3217 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3218 OP_ALG_AAI_HMAC_PRECOMP, 3219 .rfc3686 = true, 3220 .geniv = true, 3221 }, 3222 }, 3223 }; 3224 3225 struct caam_crypto_alg { 3226 struct crypto_alg crypto_alg; 3227 struct list_head entry; 3228 struct caam_alg_entry caam; 3229 }; 3230 3231 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam) 3232 { 3233 dma_addr_t dma_addr; 3234 3235 ctx->jrdev = caam_jr_alloc(); 3236 if (IS_ERR(ctx->jrdev)) { 3237 pr_err("Job Ring Device allocation for transform failed\n"); 3238 return PTR_ERR(ctx->jrdev); 3239 } 3240 3241 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, 3242 offsetof(struct caam_ctx, 3243 sh_desc_enc_dma), 3244 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC); 3245 if (dma_mapping_error(ctx->jrdev, dma_addr)) { 3246 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); 3247 caam_jr_free(ctx->jrdev); 3248 return -ENOMEM; 3249 } 3250 3251 ctx->sh_desc_enc_dma = dma_addr; 3252 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx, 3253 sh_desc_dec); 3254 ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx, 3255 sh_desc_givenc); 3256 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key); 3257 3258 /* copy descriptor header template value */ 3259 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type; 3260 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type; 3261 3262 return 0; 3263 } 3264 3265 static int caam_cra_init(struct crypto_tfm *tfm) 3266 { 3267 struct crypto_alg *alg = tfm->__crt_alg; 3268 struct caam_crypto_alg *caam_alg = 3269 container_of(alg, struct caam_crypto_alg, crypto_alg); 3270 struct caam_ctx *ctx = crypto_tfm_ctx(tfm); 3271 3272 return caam_init_common(ctx, &caam_alg->caam); 3273 } 3274 3275 static int caam_aead_init(struct crypto_aead *tfm) 3276 { 3277 struct aead_alg *alg = crypto_aead_alg(tfm); 3278 struct caam_aead_alg *caam_alg = 3279 container_of(alg, struct caam_aead_alg, aead); 3280 struct caam_ctx *ctx = crypto_aead_ctx(tfm); 3281 3282 return caam_init_common(ctx, &caam_alg->caam); 3283 } 3284 3285 static void caam_exit_common(struct caam_ctx *ctx) 3286 { 3287 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, 3288 offsetof(struct caam_ctx, sh_desc_enc_dma), 3289 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC); 3290 caam_jr_free(ctx->jrdev); 3291 } 3292 3293 static void caam_cra_exit(struct crypto_tfm *tfm) 3294 { 3295 caam_exit_common(crypto_tfm_ctx(tfm)); 3296 } 3297 3298 static void caam_aead_exit(struct crypto_aead *tfm) 3299 { 3300 caam_exit_common(crypto_aead_ctx(tfm)); 3301 } 3302 3303 static void __exit caam_algapi_exit(void) 3304 { 3305 3306 struct caam_crypto_alg *t_alg, *n; 3307 int i; 3308 3309 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3310 struct caam_aead_alg *t_alg = driver_aeads + i; 3311 3312 if (t_alg->registered) 3313 crypto_unregister_aead(&t_alg->aead); 3314 } 3315 3316 if (!alg_list.next) 3317 return; 3318 3319 list_for_each_entry_safe(t_alg, n, &alg_list, entry) { 3320 crypto_unregister_alg(&t_alg->crypto_alg); 3321 list_del(&t_alg->entry); 3322 kfree(t_alg); 3323 } 3324 } 3325 3326 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template 3327 *template) 3328 { 3329 struct caam_crypto_alg *t_alg; 3330 struct crypto_alg *alg; 3331 3332 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL); 3333 if (!t_alg) { 3334 pr_err("failed to allocate t_alg\n"); 3335 return ERR_PTR(-ENOMEM); 3336 } 3337 3338 alg = &t_alg->crypto_alg; 3339 3340 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name); 3341 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s", 3342 template->driver_name); 3343 alg->cra_module = THIS_MODULE; 3344 alg->cra_init = caam_cra_init; 3345 alg->cra_exit = caam_cra_exit; 3346 alg->cra_priority = CAAM_CRA_PRIORITY; 3347 alg->cra_blocksize = template->blocksize; 3348 alg->cra_alignmask = 0; 3349 alg->cra_ctxsize = sizeof(struct caam_ctx); 3350 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY | 3351 template->type; 3352 switch (template->type) { 3353 case CRYPTO_ALG_TYPE_GIVCIPHER: 3354 alg->cra_type = &crypto_givcipher_type; 3355 alg->cra_ablkcipher = template->template_ablkcipher; 3356 break; 3357 case CRYPTO_ALG_TYPE_ABLKCIPHER: 3358 alg->cra_type = &crypto_ablkcipher_type; 3359 alg->cra_ablkcipher = template->template_ablkcipher; 3360 break; 3361 } 3362 3363 t_alg->caam.class1_alg_type = template->class1_alg_type; 3364 t_alg->caam.class2_alg_type = template->class2_alg_type; 3365 3366 return t_alg; 3367 } 3368 3369 static void caam_aead_alg_init(struct caam_aead_alg *t_alg) 3370 { 3371 struct aead_alg *alg = &t_alg->aead; 3372 3373 alg->base.cra_module = THIS_MODULE; 3374 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3375 alg->base.cra_ctxsize = sizeof(struct caam_ctx); 3376 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 3377 3378 alg->init = caam_aead_init; 3379 alg->exit = caam_aead_exit; 3380 } 3381 3382 static int __init caam_algapi_init(void) 3383 { 3384 struct device_node *dev_node; 3385 struct platform_device *pdev; 3386 struct device *ctrldev; 3387 struct caam_drv_private *priv; 3388 int i = 0, err = 0; 3389 u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst; 3390 unsigned int md_limit = SHA512_DIGEST_SIZE; 3391 bool registered = false; 3392 3393 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0"); 3394 if (!dev_node) { 3395 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0"); 3396 if (!dev_node) 3397 return -ENODEV; 3398 } 3399 3400 pdev = of_find_device_by_node(dev_node); 3401 if (!pdev) { 3402 of_node_put(dev_node); 3403 return -ENODEV; 3404 } 3405 3406 ctrldev = &pdev->dev; 3407 priv = dev_get_drvdata(ctrldev); 3408 of_node_put(dev_node); 3409 3410 /* 3411 * If priv is NULL, it's probably because the caam driver wasn't 3412 * properly initialized (e.g. RNG4 init failed). Thus, bail out here. 3413 */ 3414 if (!priv) 3415 return -ENODEV; 3416 3417 3418 INIT_LIST_HEAD(&alg_list); 3419 3420 /* 3421 * Register crypto algorithms the device supports. 3422 * First, detect presence and attributes of DES, AES, and MD blocks. 3423 */ 3424 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls); 3425 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls); 3426 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT; 3427 aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT; 3428 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3429 3430 /* If MD is present, limit digest size based on LP256 */ 3431 if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256)) 3432 md_limit = SHA256_DIGEST_SIZE; 3433 3434 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3435 struct caam_crypto_alg *t_alg; 3436 struct caam_alg_template *alg = driver_algs + i; 3437 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK; 3438 3439 /* Skip DES algorithms if not supported by device */ 3440 if (!des_inst && 3441 ((alg_sel == OP_ALG_ALGSEL_3DES) || 3442 (alg_sel == OP_ALG_ALGSEL_DES))) 3443 continue; 3444 3445 /* Skip AES algorithms if not supported by device */ 3446 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES)) 3447 continue; 3448 3449 /* 3450 * Check support for AES modes not available 3451 * on LP devices. 3452 */ 3453 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) 3454 if ((alg->class1_alg_type & OP_ALG_AAI_MASK) == 3455 OP_ALG_AAI_XTS) 3456 continue; 3457 3458 t_alg = caam_alg_alloc(alg); 3459 if (IS_ERR(t_alg)) { 3460 err = PTR_ERR(t_alg); 3461 pr_warn("%s alg allocation failed\n", alg->driver_name); 3462 continue; 3463 } 3464 3465 err = crypto_register_alg(&t_alg->crypto_alg); 3466 if (err) { 3467 pr_warn("%s alg registration failed\n", 3468 t_alg->crypto_alg.cra_driver_name); 3469 kfree(t_alg); 3470 continue; 3471 } 3472 3473 list_add_tail(&t_alg->entry, &alg_list); 3474 registered = true; 3475 } 3476 3477 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3478 struct caam_aead_alg *t_alg = driver_aeads + i; 3479 u32 c1_alg_sel = t_alg->caam.class1_alg_type & 3480 OP_ALG_ALGSEL_MASK; 3481 u32 c2_alg_sel = t_alg->caam.class2_alg_type & 3482 OP_ALG_ALGSEL_MASK; 3483 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; 3484 3485 /* Skip DES algorithms if not supported by device */ 3486 if (!des_inst && 3487 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) || 3488 (c1_alg_sel == OP_ALG_ALGSEL_DES))) 3489 continue; 3490 3491 /* Skip AES algorithms if not supported by device */ 3492 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES)) 3493 continue; 3494 3495 /* 3496 * Check support for AES algorithms not available 3497 * on LP devices. 3498 */ 3499 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) 3500 if (alg_aai == OP_ALG_AAI_GCM) 3501 continue; 3502 3503 /* 3504 * Skip algorithms requiring message digests 3505 * if MD or MD size is not supported by device. 3506 */ 3507 if (c2_alg_sel && 3508 (!md_inst || (t_alg->aead.maxauthsize > md_limit))) 3509 continue; 3510 3511 caam_aead_alg_init(t_alg); 3512 3513 err = crypto_register_aead(&t_alg->aead); 3514 if (err) { 3515 pr_warn("%s alg registration failed\n", 3516 t_alg->aead.base.cra_driver_name); 3517 continue; 3518 } 3519 3520 t_alg->registered = true; 3521 registered = true; 3522 } 3523 3524 if (registered) 3525 pr_info("caam algorithms registered in /proc/crypto\n"); 3526 3527 return err; 3528 } 3529 3530 module_init(caam_algapi_init); 3531 module_exit(caam_algapi_exit); 3532 3533 MODULE_LICENSE("GPL"); 3534 MODULE_DESCRIPTION("FSL CAAM support for crypto API"); 3535 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC"); 3536