// SPDX-License-Identifier: GPL-2.0+
/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016-2019 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------       |------------>|  (hashKey)  |
 *       .               |             | (cipherKey) |
 *       .               |    |------->| (operation) |
 * ---------------       |    |        ---------------
 * | JobDesc #2  |-------|    |
 * | *(packet 2) |            |
 * ---------------            |
 *       .                    |
 *       .                    |
 * ---------------            |
 * | JobDesc #3  |------------|
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */

#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"
#include <crypto/engine.h>
#include <crypto/xts.h>
#include <asm/unaligned.h>
#include <linux/dma-mapping.h>
#include <linux/kernel.h>

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
	bool nodkp;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	struct crypto_engine_ctx enginectx;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
	bool xts_key_fallback;
	struct crypto_skcipher *fallback;
};

struct caam_skcipher_req_ctx {
	struct skcipher_edesc *edesc;
	struct skcipher_request fallback_req;
};

struct caam_aead_req_ctx {
	struct aead_edesc *edesc;
};
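
/*
 * Illustrative sketch (not compiled in): the job descriptor layout from the
 * header comment above, expressed with the construction helpers from
 * desc_constr.h. The helper names are the ones used throughout this file;
 * the function itself is only an example and assumes all addresses are
 * already DMA-mapped by the caller.
 */
#if 0
static void example_job_desc(u32 *desc, dma_addr_t sh_desc_dma,
			     int sh_desc_len, dma_addr_t in_dma, u32 in_len,
			     dma_addr_t out_dma, u32 out_len)
{
	/* Header + ShareDesc pointer */
	init_job_desc_shared(desc, sh_desc_dma, sh_desc_len,
			     HDR_SHARE_DEFER | HDR_REVERSE);
	/* SEQ_IN_PTR: input buffer and length */
	append_seq_in_ptr(desc, in_dma, in_len, 0);
	/* SEQ_OUT_PTR: output buffer and length */
	append_seq_out_ptr(desc, out_dma, out_len, 0);
}
#endif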

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
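
/*
 * Worked example for the rem_bytes budget used above and in the other
 * *_set_sh_desc() routines (illustrative figures): the descriptor buffer
 * holds 64 words of CAAM_CMD_SZ = 4 bytes, so CAAM_DESC_BYTES_MAX = 256.
 * Subtracting the job descriptor I/O commands (AEAD_DESC_JOB_IO_LEN) and
 * the padded split key leaves the space available for shared-descriptor
 * commands; the key is inlined only when the fixed-length descriptor still
 * fits, otherwise it is referenced through ctx->key_dma.
 */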

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/*
	 * In case |user key| > |derived key|, using DKP<imm,imm>
	 * would result in invalid opcodes (last bytes of user key) in
	 * the resulting descriptor. Use DKP<ptr,imm> instead => both
	 * virtual and dma key addresses are needed.
	 */
	ctx->adata.key_virt = ctx->key;
	ctx->adata.key_dma = ctx->key_dma;

	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
	int err;

	err = crypto_gcm_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);
h/w Buffer 492 */ 493 if (rem_bytes >= DESC_RFC4543_DEC_LEN) { 494 ctx->cdata.key_inline = true; 495 ctx->cdata.key_virt = ctx->key; 496 } else { 497 ctx->cdata.key_inline = false; 498 ctx->cdata.key_dma = ctx->key_dma; 499 } 500 501 desc = ctx->sh_desc_dec; 502 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 503 false); 504 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 505 desc_bytes(desc), ctx->dir); 506 507 return 0; 508 } 509 510 static int rfc4543_setauthsize(struct crypto_aead *authenc, 511 unsigned int authsize) 512 { 513 struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc); 514 515 if (authsize != 16) 516 return -EINVAL; 517 518 ctx->authsize = authsize; 519 rfc4543_set_sh_desc(authenc); 520 521 return 0; 522 } 523 524 static int chachapoly_set_sh_desc(struct crypto_aead *aead) 525 { 526 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 527 struct device *jrdev = ctx->jrdev; 528 unsigned int ivsize = crypto_aead_ivsize(aead); 529 u32 *desc; 530 531 if (!ctx->cdata.keylen || !ctx->authsize) 532 return 0; 533 534 desc = ctx->sh_desc_enc; 535 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, 536 ctx->authsize, true, false); 537 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 538 desc_bytes(desc), ctx->dir); 539 540 desc = ctx->sh_desc_dec; 541 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, 542 ctx->authsize, false, false); 543 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 544 desc_bytes(desc), ctx->dir); 545 546 return 0; 547 } 548 549 static int chachapoly_setauthsize(struct crypto_aead *aead, 550 unsigned int authsize) 551 { 552 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 553 554 if (authsize != POLY1305_DIGEST_SIZE) 555 return -EINVAL; 556 557 ctx->authsize = authsize; 558 return chachapoly_set_sh_desc(aead); 559 } 560 561 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key, 562 unsigned int keylen) 563 { 564 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 565 unsigned int ivsize = crypto_aead_ivsize(aead); 566 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize; 567 568 if (keylen != CHACHA_KEY_SIZE + saltlen) 569 return -EINVAL; 570 571 ctx->cdata.key_virt = key; 572 ctx->cdata.keylen = keylen - saltlen; 573 574 return chachapoly_set_sh_desc(aead); 575 } 576 577 static int aead_setkey(struct crypto_aead *aead, 578 const u8 *key, unsigned int keylen) 579 { 580 struct caam_ctx *ctx = crypto_aead_ctx_dma(aead); 581 struct device *jrdev = ctx->jrdev; 582 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 583 struct crypto_authenc_keys keys; 584 int ret = 0; 585 586 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) 587 goto badkey; 588 589 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n", 590 keys.authkeylen + keys.enckeylen, keys.enckeylen, 591 keys.authkeylen); 592 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 593 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 594 595 /* 596 * If DKP is supported, use it in the shared descriptor to generate 597 * the split key. 

static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);

	print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
			     ctx->adata.keylen_pad + keys.enckeylen, 1);

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
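
/*
 * Resulting ctx->key layout programmed by aead_setkey() (illustrative):
 *
 *	ctx->key: [ authentication key, padded ][ encryption key ]
 *	          |<---- adata.keylen_pad ----->|<- cdata.keylen ->|
 *
 * On era >= 6 devices the raw authentication key is stored and the DKP
 * instruction inside the shared descriptor derives the split key; on older
 * devices gen_split_key() precomputes the split key here.
 */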

static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		return err;

	err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
	      aead_setkey(aead, key, keylen);

	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen - 4);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen - 4);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}

static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen, const u32 ctx1_iv_off)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	const bool is_rfc3686 = alg->caam.rfc3686;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, 0);
}

static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
				   const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
	keylen -= CTR_RFC3686_NONCE_SIZE;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	ctx1_iv_off = 16;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}
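
/*
 * CONTEXT1 usage for the CTR-mode setkey routines above (illustrative):
 *
 *	ctr(aes):          CONTEXT1[255:128] = IV   (ctx1_iv_off = 16)
 *	rfc3686(ctr(aes)): CONTEXT1[255:128] = {NONCE, IV, COUNTER}
 *			   (ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE = 20)
 *
 * E.g. rfc3686 with AES-128 receives a 20-byte user key - 16 key bytes
 * followed by the 4-byte nonce - which is why keylen is reduced by
 * CTR_RFC3686_NONCE_SIZE before aes_check_keylen() runs.
 */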

static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}

static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
				const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des3_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}

static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int err;

	err = xts_verify_key(skcipher, key, keylen);
	if (err) {
		dev_dbg(jrdev, "key size mismatch\n");
		return err;
	}

	if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
		ctx->xts_key_fallback = true;

	if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
		err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
		if (err)
			return err;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

/*
 * The IV lives in a DMA-able, cache-aligned area right behind the hw job
 * descriptor and link tables inside the extended descriptor allocation;
 * see skcipher_edesc_alloc() below.
 */
static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc)
{
	return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
			 dma_get_cache_alignment());
}

static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
			    void *context)
{
	struct aead_request *req = context;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
	struct aead_edesc *edesc;
	int ecode = 0;
	bool has_bklog;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = rctx->edesc;
	has_bklog = edesc->bklog;

	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		aead_request_complete(req, ecode);
	else
		crypto_finalize_aead_request(jrp->engine, req, ecode);
}

static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
				void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ecode = 0;
	bool has_bklog;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = rctx->edesc;
	has_bklog = edesc->bklog;
	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block (CBC mode) or last counter (CTR mode).
	 * This is used e.g. by the CTS mode.
	 */
	if (ivsize && !ecode) {
		memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize);

		print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
				     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
				     ivsize, 1);
	}

	caam_dump_sg("dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	kfree(edesc);

	/*
	 * If no backlog flag, the completion of the request is done
	 * by CAAM, not crypto engine.
	 */
	if (!has_bklog)
		skcipher_request_complete(req, ecode);
	else
		crypto_finalize_skcipher_request(jrp->engine, req, ecode);
}

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
						    0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->mapped_dst_nents) {
			dst_dma = 0;
			out_options = 0;
		} else if (edesc->mapped_dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}

static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12 bytes nonce in a single operation
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}

static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in skcipher job descriptor
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
	struct device *jrdev = ctx->jrdev;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 in_options = 0, out_options = 0;
	dma_addr_t src_dma, dst_dma, ptr;
	int len, sec4_sg_index = 0;

	print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	dev_dbg(jrdev, "asked=%d, cryptlen%d\n",
		(int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);

	caam_dump_sg("src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (ivsize || edesc->mapped_src_nents > 1) {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
		in_options = LDST_SGF;
	} else {
		src_dma = sg_dma_address(req->src);
	}

	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
		out_options = in_options;
	} else if (!ivsize && edesc->mapped_dst_nents == 1) {
		dst_dma = sg_dma_address(req->dst);
	} else {
		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
			  sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}

	append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	int src_len, dst_len = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_len = req->assoclen + req->cryptlen;
		dst_len = src_len + (encrypt ? authsize : (-authsize));
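
		/*
		 * Worked example (illustrative): GCM encryption with
		 * assoclen = 16, cryptlen = 64 and authsize = 16 reads
		 * 16 + 64 = 80 bytes from req->src and writes 80 + 16 = 96
		 * bytes (AAD, ciphertext and tag) to req->dst; decryption
		 * of that message has cryptlen = 80 and writes back
		 * 96 - 16 = 80 bytes.
		 */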
		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, dst_len);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				dst_len);
			return ERR_PTR(dst_nents);
		}
	} else {
		src_len = req->assoclen + req->cryptlen +
			  (encrypt ? authsize : 0);

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover also the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	/*
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries.
	 */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	if (mapped_dst_nents > 1)
		sec4_sg_len += pad_sg_nents(mapped_dst_nents);
	else
		sec4_sg_len = pad_sg_nents(sec4_sg_len);

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
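
	/*
	 * Illustrative arithmetic: pad_sg_nents() rounds up to a multiple
	 * of 4 entries. With mapped_src_nents = 3 and mapped_dst_nents = 5,
	 * only the table read last needs padding:
	 * sec4_sg_len = 3 + ALIGN(5, 4) = 3 + 8 = 11 entries, i.e.
	 * 11 * sizeof(struct sec4_sg_entry) = 176 bytes.
	 */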

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;

	rctx->edesc = edesc;

	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, src_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, dst_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}

static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
{
	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	struct aead_edesc *edesc = rctx->edesc;
	u32 *desc = edesc->hw_desc;
	int ret;

	/*
	 * Only the backlog request are sent to crypto-engine since the others
	 * can be handled by CAAM, if free, especially since JR has up to 1024
	 * entries (more than the 10 entries from crypto-engine).
	 */
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
							     req);
	else
		ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);

	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
		aead_unmap(jrdev, edesc, req);
		kfree(rctx->edesc);
	}

	return ret;
}

static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, encrypt);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	return aead_enqueue_req(jrdev, req);
}

static int chachapoly_encrypt(struct aead_request *req)
{
	return chachapoly_crypt(req, true);
}

static int chachapoly_decrypt(struct aead_request *req)
{
	return chachapoly_crypt(req, false);
}

static inline int aead_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, encrypt);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	return aead_enqueue_req(jrdev, req);
}

static int aead_encrypt(struct aead_request *req)
{
	return aead_crypt(req, true);
}

static int aead_decrypt(struct aead_request *req)
{
	return aead_crypt(req, false);
}

static int aead_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = aead_request_cast(areq);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(crypto_aead_reqtfm(req));
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;

	ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);

	if (ret == -ENOSPC && engine->retry_support)
		return ret;

	if (ret != -EINPROGRESS) {
		aead_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;
	}

	return ret;
}

static inline int gcm_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
				 encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, encrypt);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	return aead_enqueue_req(jrdev, req);
}

static int gcm_encrypt(struct aead_request *req)
{
	return gcm_crypt(req, true);
}

static int gcm_decrypt(struct aead_request *req)
{
	return gcm_crypt(req, false);
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
}
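
/*
 * Note (illustrative): crypto_ipsec_check_assoclen() only admits the two
 * associated-data lengths ESP produces - 16 bytes (SPI + 32-bit sequence
 * number + 8-byte IV) or 20 bytes (SPI + 64-bit extended sequence number +
 * 8-byte IV) - so malformed requests are rejected before a descriptor is
 * built.
 */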

/*
 * allocate and map the skcipher extended descriptor for skcipher
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
	unsigned int aligned_size;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	if (!ivsize && mapped_src_nents == 1)
		sec4_sg_ents = 0; /* no need for an input hw s/g table */
	else
		sec4_sg_ents = mapped_src_nents + !!ivsize;
	dst_sg_idx = sec4_sg_ents;

	/*
	 * Input, output HW S/G tables: [IV, src][dst, IV]
	 * IV entries point to the same buffer
	 * If src == dst, S/G entries are reused (S/G tables overlap)
	 *
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries. Logic:
	 * if (output S/G)
	 *	pad output S/G, if needed
	 * else if (input S/G) ...
	 *	pad input S/G, if needed
	 */
	if (ivsize || mapped_dst_nents > 1) {
		if (req->src == req->dst)
			sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
		else
			sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
						     !!ivsize);
	} else {
		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
	}

	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	aligned_size = sizeof(*edesc) + desc_bytes + sec4_sg_bytes;
	aligned_size = ALIGN(aligned_size, dma_get_cache_alignment());
	aligned_size += ~(ARCH_KMALLOC_MINALIGN - 1) &
			(dma_get_cache_alignment() - 1);
	aligned_size += ALIGN(ivsize, dma_get_cache_alignment());
	edesc = kzalloc(aligned_size, flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);
	rctx->edesc = edesc;

	/* Make sure IV is located in a DMAable area */
	if (ivsize) {
		iv = skcipher_edesc_iv(edesc);
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
		if (dma_mapping_error(jrdev, iv_dma)) {
			dev_err(jrdev, "unable to map IV\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	}
	if (dst_sg_idx)
		sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
			      !!ivsize, 0);

	if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
		sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
			      dst_sg_idx, 0);

	if (ivsize)
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
				   mapped_dst_nents, iv_dma, ivsize, 0);

	if (ivsize || mapped_dst_nents > 1)
		sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
				    mapped_dst_nents - 1 + !!ivsize);

	if (sec4_sg_bytes) {
		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
						    sec4_sg_bytes,
						    DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
			dev_err(jrdev, "unable to map S/G table\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, iv_dma, ivsize, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->iv_dma = iv_dma;

	print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
			     sec4_sg_bytes, 1);

	return edesc;
}
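
/*
 * Worked example for the [IV, src][dst, IV] tables built above
 * (illustrative): with req->src != req->dst, ivsize = 16,
 * mapped_src_nents = 2 and mapped_dst_nents = 2, the input table is
 * [IV, src0, src1] (3 entries, so dst_sg_idx = 3) and the output table
 * [dst0, dst1, IV] is padded to pad_sg_nents(3) = 4 entries, 7 entries
 * total; both IV entries point to the same DMA-mapped buffer, and the
 * output IV entry is flagged as the final one.
 */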

static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(crypto_skcipher_reqtfm(req));
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;

	ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);

	if (ret == -ENOSPC && engine->retry_support)
		return ret;

	if (ret != -EINPROGRESS) {
		skcipher_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;
	}

	return ret;
}

static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);

	return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
}
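
/*
 * Note (illustrative): xts_skcipher_ivsize() reports whether the upper
 * 64 bits of the XTS IV (i.e. the high half of the sector index) are
 * non-zero. Hardware up to era 8 only handles the lower 64 bits, so such
 * requests - like the non-standard key sizes flagged in
 * xts_skcipher_setkey() - are redirected to the software fallback in
 * skcipher_crypt() below.
 */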

static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int ret = 0;

	/*
	 * XTS is expected to return an error even for input length = 0
	 * Note that the case input length < block size will be caught during
	 * HW offloading and return an error.
	 */
	if (!req->cryptlen && !ctx->fallback)
		return 0;

	if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
			      ctx->xts_key_fallback)) {
		struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);

		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&rctx->fallback_req,
					      req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);

		return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
				 crypto_skcipher_decrypt(&rctx->fallback_req);
	}

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_skcipher_job(req, edesc, encrypt);

	print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	desc = edesc->hw_desc;
	/*
	 * Only the backlog request are sent to crypto-engine since the others
	 * can be handled by CAAM, if free, especially since JR has up to 1024
	 * entries (more than the 10 entries from crypto-engine).
	 */
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
								 req);
	else
		ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);

	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int skcipher_encrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, true);
}

static int skcipher_decrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, false);
}

static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = ctr_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc3686_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam",
				.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},

static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = ctr_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc3686_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam",
				.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des)",
				.cra_driver_name = "ecb-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(aes)",
				.cra_driver_name = "ecb-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aes_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des3_ede)",
				.cra_driver_name = "ecb-des3-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
	},
};
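
/*
 * Each table entry above binds a generic algorithm name to the CAAM
 * operation that implements it: .class1_alg_type ORs an algorithm selector
 * with an AAI (additional algorithm information) mode, e.g.
 *
 *	.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC;
 *
 * caam_init_common() later folds this value into ctx->cdata.algtype, from
 * which the shared-descriptor templates derive the OPERATION command for
 * the CHA.
 */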
"authenc-hmac-sha1-" 2111 "ecb-cipher_null-caam", 2112 .cra_blocksize = NULL_BLOCK_SIZE, 2113 }, 2114 .setkey = aead_setkey, 2115 .setauthsize = aead_setauthsize, 2116 .encrypt = aead_encrypt, 2117 .decrypt = aead_decrypt, 2118 .ivsize = NULL_IV_SIZE, 2119 .maxauthsize = SHA1_DIGEST_SIZE, 2120 }, 2121 .caam = { 2122 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2123 OP_ALG_AAI_HMAC_PRECOMP, 2124 }, 2125 }, 2126 { 2127 .aead = { 2128 .base = { 2129 .cra_name = "authenc(hmac(sha224)," 2130 "ecb(cipher_null))", 2131 .cra_driver_name = "authenc-hmac-sha224-" 2132 "ecb-cipher_null-caam", 2133 .cra_blocksize = NULL_BLOCK_SIZE, 2134 }, 2135 .setkey = aead_setkey, 2136 .setauthsize = aead_setauthsize, 2137 .encrypt = aead_encrypt, 2138 .decrypt = aead_decrypt, 2139 .ivsize = NULL_IV_SIZE, 2140 .maxauthsize = SHA224_DIGEST_SIZE, 2141 }, 2142 .caam = { 2143 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2144 OP_ALG_AAI_HMAC_PRECOMP, 2145 }, 2146 }, 2147 { 2148 .aead = { 2149 .base = { 2150 .cra_name = "authenc(hmac(sha256)," 2151 "ecb(cipher_null))", 2152 .cra_driver_name = "authenc-hmac-sha256-" 2153 "ecb-cipher_null-caam", 2154 .cra_blocksize = NULL_BLOCK_SIZE, 2155 }, 2156 .setkey = aead_setkey, 2157 .setauthsize = aead_setauthsize, 2158 .encrypt = aead_encrypt, 2159 .decrypt = aead_decrypt, 2160 .ivsize = NULL_IV_SIZE, 2161 .maxauthsize = SHA256_DIGEST_SIZE, 2162 }, 2163 .caam = { 2164 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2165 OP_ALG_AAI_HMAC_PRECOMP, 2166 }, 2167 }, 2168 { 2169 .aead = { 2170 .base = { 2171 .cra_name = "authenc(hmac(sha384)," 2172 "ecb(cipher_null))", 2173 .cra_driver_name = "authenc-hmac-sha384-" 2174 "ecb-cipher_null-caam", 2175 .cra_blocksize = NULL_BLOCK_SIZE, 2176 }, 2177 .setkey = aead_setkey, 2178 .setauthsize = aead_setauthsize, 2179 .encrypt = aead_encrypt, 2180 .decrypt = aead_decrypt, 2181 .ivsize = NULL_IV_SIZE, 2182 .maxauthsize = SHA384_DIGEST_SIZE, 2183 }, 2184 .caam = { 2185 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2186 OP_ALG_AAI_HMAC_PRECOMP, 2187 }, 2188 }, 2189 { 2190 .aead = { 2191 .base = { 2192 .cra_name = "authenc(hmac(sha512)," 2193 "ecb(cipher_null))", 2194 .cra_driver_name = "authenc-hmac-sha512-" 2195 "ecb-cipher_null-caam", 2196 .cra_blocksize = NULL_BLOCK_SIZE, 2197 }, 2198 .setkey = aead_setkey, 2199 .setauthsize = aead_setauthsize, 2200 .encrypt = aead_encrypt, 2201 .decrypt = aead_decrypt, 2202 .ivsize = NULL_IV_SIZE, 2203 .maxauthsize = SHA512_DIGEST_SIZE, 2204 }, 2205 .caam = { 2206 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2207 OP_ALG_AAI_HMAC_PRECOMP, 2208 }, 2209 }, 2210 { 2211 .aead = { 2212 .base = { 2213 .cra_name = "authenc(hmac(md5),cbc(aes))", 2214 .cra_driver_name = "authenc-hmac-md5-" 2215 "cbc-aes-caam", 2216 .cra_blocksize = AES_BLOCK_SIZE, 2217 }, 2218 .setkey = aead_setkey, 2219 .setauthsize = aead_setauthsize, 2220 .encrypt = aead_encrypt, 2221 .decrypt = aead_decrypt, 2222 .ivsize = AES_BLOCK_SIZE, 2223 .maxauthsize = MD5_DIGEST_SIZE, 2224 }, 2225 .caam = { 2226 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2227 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2228 OP_ALG_AAI_HMAC_PRECOMP, 2229 }, 2230 }, 2231 { 2232 .aead = { 2233 .base = { 2234 .cra_name = "echainiv(authenc(hmac(md5)," 2235 "cbc(aes)))", 2236 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2237 "cbc-aes-caam", 2238 .cra_blocksize = AES_BLOCK_SIZE, 2239 }, 2240 .setkey = aead_setkey, 2241 .setauthsize = aead_setauthsize, 2242 .encrypt = aead_encrypt, 2243 .decrypt = aead_decrypt, 2244 .ivsize = AES_BLOCK_SIZE, 2245 .maxauthsize = MD5_DIGEST_SIZE, 2246 }, 2247 .caam 
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
"hmac-sha256-" 2639 "cbc-des3_ede-caam", 2640 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2641 }, 2642 .setkey = des3_aead_setkey, 2643 .setauthsize = aead_setauthsize, 2644 .encrypt = aead_encrypt, 2645 .decrypt = aead_decrypt, 2646 .ivsize = DES3_EDE_BLOCK_SIZE, 2647 .maxauthsize = SHA256_DIGEST_SIZE, 2648 }, 2649 .caam = { 2650 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2651 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2652 OP_ALG_AAI_HMAC_PRECOMP, 2653 .geniv = true, 2654 }, 2655 }, 2656 { 2657 .aead = { 2658 .base = { 2659 .cra_name = "authenc(hmac(sha384)," 2660 "cbc(des3_ede))", 2661 .cra_driver_name = "authenc-hmac-sha384-" 2662 "cbc-des3_ede-caam", 2663 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2664 }, 2665 .setkey = des3_aead_setkey, 2666 .setauthsize = aead_setauthsize, 2667 .encrypt = aead_encrypt, 2668 .decrypt = aead_decrypt, 2669 .ivsize = DES3_EDE_BLOCK_SIZE, 2670 .maxauthsize = SHA384_DIGEST_SIZE, 2671 }, 2672 .caam = { 2673 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2674 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2675 OP_ALG_AAI_HMAC_PRECOMP, 2676 }, 2677 }, 2678 { 2679 .aead = { 2680 .base = { 2681 .cra_name = "echainiv(authenc(hmac(sha384)," 2682 "cbc(des3_ede)))", 2683 .cra_driver_name = "echainiv-authenc-" 2684 "hmac-sha384-" 2685 "cbc-des3_ede-caam", 2686 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2687 }, 2688 .setkey = des3_aead_setkey, 2689 .setauthsize = aead_setauthsize, 2690 .encrypt = aead_encrypt, 2691 .decrypt = aead_decrypt, 2692 .ivsize = DES3_EDE_BLOCK_SIZE, 2693 .maxauthsize = SHA384_DIGEST_SIZE, 2694 }, 2695 .caam = { 2696 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2697 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2698 OP_ALG_AAI_HMAC_PRECOMP, 2699 .geniv = true, 2700 }, 2701 }, 2702 { 2703 .aead = { 2704 .base = { 2705 .cra_name = "authenc(hmac(sha512)," 2706 "cbc(des3_ede))", 2707 .cra_driver_name = "authenc-hmac-sha512-" 2708 "cbc-des3_ede-caam", 2709 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2710 }, 2711 .setkey = des3_aead_setkey, 2712 .setauthsize = aead_setauthsize, 2713 .encrypt = aead_encrypt, 2714 .decrypt = aead_decrypt, 2715 .ivsize = DES3_EDE_BLOCK_SIZE, 2716 .maxauthsize = SHA512_DIGEST_SIZE, 2717 }, 2718 .caam = { 2719 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2720 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2721 OP_ALG_AAI_HMAC_PRECOMP, 2722 }, 2723 }, 2724 { 2725 .aead = { 2726 .base = { 2727 .cra_name = "echainiv(authenc(hmac(sha512)," 2728 "cbc(des3_ede)))", 2729 .cra_driver_name = "echainiv-authenc-" 2730 "hmac-sha512-" 2731 "cbc-des3_ede-caam", 2732 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2733 }, 2734 .setkey = des3_aead_setkey, 2735 .setauthsize = aead_setauthsize, 2736 .encrypt = aead_encrypt, 2737 .decrypt = aead_decrypt, 2738 .ivsize = DES3_EDE_BLOCK_SIZE, 2739 .maxauthsize = SHA512_DIGEST_SIZE, 2740 }, 2741 .caam = { 2742 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2743 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2744 OP_ALG_AAI_HMAC_PRECOMP, 2745 .geniv = true, 2746 }, 2747 }, 2748 { 2749 .aead = { 2750 .base = { 2751 .cra_name = "authenc(hmac(md5),cbc(des))", 2752 .cra_driver_name = "authenc-hmac-md5-" 2753 "cbc-des-caam", 2754 .cra_blocksize = DES_BLOCK_SIZE, 2755 }, 2756 .setkey = aead_setkey, 2757 .setauthsize = aead_setauthsize, 2758 .encrypt = aead_encrypt, 2759 .decrypt = aead_decrypt, 2760 .ivsize = DES_BLOCK_SIZE, 2761 .maxauthsize = MD5_DIGEST_SIZE, 2762 }, 2763 .caam = { 2764 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2765 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2766 
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
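	/*
	 * Note the .geniv = true entries above: for the seqiv()/echainiv()
	 * wrapped variants the IV is produced as part of the encrypt
	 * operation rather than supplied by the caller, and the shared
	 * descriptors are built accordingly.
	 */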
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539(chacha20,poly1305)",
				.cra_driver_name = "rfc7539-chacha20-poly1305-"
						   "caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = CHACHAPOLY_IV_SIZE,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539esp(chacha20,poly1305)",
				.cra_driver_name = "rfc7539esp-chacha20-"
						   "poly1305-caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = 8,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
};

static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;
	const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
						   sh_desc_enc);

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma) -
					sh_desc_enc_offset,
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec) -
					  sh_desc_enc_offset;
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
				  sh_desc_enc_offset;

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}
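
/*
 * caam_init_common() above maps sh_desc_enc, sh_desc_dec and key with a
 * single dma_map_single_attrs() call, relying on their layout inside
 * struct caam_ctx:
 *
 *	sh_desc_enc_dma = dma_addr
 *	sh_desc_dec_dma = dma_addr + offsetof(sh_desc_dec) - offsetof(sh_desc_enc)
 *	key_dma         = dma_addr + offsetof(key)         - offsetof(sh_desc_enc)
 *
 * so these three members must stay contiguous, in this order, directly
 * ahead of sh_desc_enc_dma (which bounds the mapped length).
 */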

static int caam_cra_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher);
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
	u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
	int ret = 0;

	ctx->enginectx.op.do_one_request = skcipher_do_one_req;

	if (alg_aai == OP_ALG_AAI_XTS) {
		const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
		struct crypto_skcipher *fallback;

		fallback = crypto_alloc_skcipher(tfm_name, 0,
						 CRYPTO_ALG_NEED_FALLBACK);
		if (IS_ERR(fallback)) {
			pr_err("Failed to allocate %s fallback: %ld\n",
			       tfm_name, PTR_ERR(fallback));
			return PTR_ERR(fallback);
		}

		ctx->fallback = fallback;
		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
					    crypto_skcipher_reqsize(fallback));
	} else {
		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
	}

	ret = caam_init_common(ctx, &caam_alg->caam, false);
	if (ret && ctx->fallback)
		crypto_free_skcipher(ctx->fallback);

	return ret;
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx_dma(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));

	ctx->enginectx.op.do_one_request = aead_do_one_req;

	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
}

static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma) -
			       offsetof(struct caam_ctx, sh_desc_enc),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);

	if (ctx->fallback)
		crypto_free_skcipher(ctx->fallback);
	caam_exit_common(ctx);
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx_dma(tfm));
}

void caam_algapi_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;

		if (t_alg->registered)
			crypto_unregister_skcipher(&t_alg->skcipher);
	}
}

static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
{
	struct skcipher_alg *alg = &t_alg->skcipher;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
	alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_KERN_DRIVER_ONLY);

	alg->init = caam_cra_init;
	alg->exit = caam_cra_exit;
}

static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
			      CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}

int caam_algapi_init(struct device *ctrldev)
{
	struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false, gcm_support;

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	if (priv->era < 10) {
		u32 cha_vid, cha_inst, aes_rn;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
		ccha_inst = 0;
		ptha_inst = 0;

		aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
			 CHA_ID_LS_AES_MASK;
		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
	} else {
		u32 aesa, mdha;

		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
		ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
		ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;

		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
	}

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;
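
	/*
	 * To summarize the detection above: era < 10 parts pack CHA version
	 * and instantiation counts into the perfmon CHA_ID/CHA_NUM words,
	 * while era >= 10 parts expose one version register per accelerator
	 * (vreg.aesa, vreg.mdha, ...). Either way we end up with per-CHA
	 * (vid, inst) pairs, e.g. for AES:
	 *
	 *	!aes_inst                     -> no AES unit, skip AES algs
	 *	aes_vid == CHA_VER_VID_AES_LP -> low-power AES: no XTS, and
	 *	                                 GCM only on newer revisions
	 *
	 * which is what the registration loops below check.
	 */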

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		    OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/* Skip GCM algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
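
/*
 * Once registered, the algorithms show up like any other crypto API
 * implementation; a (hypothetical) way to confirm this on a CAAM target:
 *
 *	$ grep -A3 'caam' /proc/crypto | head
 *	driver       : cbc-aes-caam
 *	module       : kernel
 *	priority     : 3000
 *
 * The CAAM_CRA_PRIORITY of 3000 is what lets these implementations win
 * selection over the generic software ones.
 */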