// SPDX-License-Identifier: GPL-2.0+
/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016-2019 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */

#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"
#include <crypto/engine.h>

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
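
/*
 * Worked example of the bound above (assuming the usual constants:
 * AES_MAX_KEY_SIZE = 32, CTR_RFC3686_NONCE_SIZE = 4,
 * SHA512_DIGEST_SIZE = 64): 32 + 4 + 2 * 64 = 164 bytes. The "* 2"
 * covers the largest split key, an ipad/opad digest pair.
 */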

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
	bool nodkp;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	struct crypto_engine_ctx enginectx;
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};

struct caam_skcipher_req_ctx {
	struct skcipher_edesc *edesc;
};

struct caam_aead_req_ctx {
	struct aead_edesc *edesc;
};

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
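
/*
 * For the counter modes handled below, CONTEXT1[255:128] holds the 16-byte
 * counter block. Illustration for RFC3686 (field sizes per RFC 3686):
 *
 *	CONTEXT1[255:128] = | NONCE (4) | IV (8) | COUNTER (4) |
 *
 * which is why the IV load offset is 16 for plain CTR and
 * 16 + CTR_RFC3686_NONCE_SIZE for RFC3686.
 */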

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/*
	 * In case |user key| > |derived key|, using DKP<imm,imm>
	 * would result in invalid opcodes (last bytes of user key) in
	 * the resulting descriptor. Use DKP<ptr,imm> instead => both
	 * virtual and dma key addresses are needed.
	 */
	ctx->adata.key_virt = ctx->key;
	ctx->adata.key_dma = ctx->key_dma;

	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}
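
/*
 * Note that ->setkey() and ->setauthsize() may arrive in either order, so
 * each *_set_sh_desc() below returns early until both the key and the
 * authsize are known; whichever call comes last (re)builds the descriptors.
 */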

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
	int err;

	err = crypto_gcm_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}
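
/*
 * RFC4106 (GCM in IPsec ESP): the 12-byte GCM nonce is the 4-byte salt
 * kept at the end of the key material, followed by the 8-byte per-packet
 * IV carried in the request.
 */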

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}
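
/*
 * RFC4543 (GMAC) uses the same nonce construction as RFC4106, but the
 * payload is only authenticated, never encrypted, and the ICV length is
 * fixed at 16 bytes (enforced in rfc4543_setauthsize() below).
 */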
591 */ 592 if (ctrlpriv->era >= 6) { 593 ctx->adata.keylen = keys.authkeylen; 594 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype & 595 OP_ALG_ALGSEL_MASK); 596 597 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE) 598 goto badkey; 599 600 memcpy(ctx->key, keys.authkey, keys.authkeylen); 601 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, 602 keys.enckeylen); 603 dma_sync_single_for_device(jrdev, ctx->key_dma, 604 ctx->adata.keylen_pad + 605 keys.enckeylen, ctx->dir); 606 goto skip_split_key; 607 } 608 609 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, 610 keys.authkeylen, CAAM_MAX_KEY_SIZE - 611 keys.enckeylen); 612 if (ret) { 613 goto badkey; 614 } 615 616 /* postpend encryption key to auth split key */ 617 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen); 618 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + 619 keys.enckeylen, ctx->dir); 620 621 print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ", 622 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, 623 ctx->adata.keylen_pad + keys.enckeylen, 1); 624 625 skip_split_key: 626 ctx->cdata.keylen = keys.enckeylen; 627 memzero_explicit(&keys, sizeof(keys)); 628 return aead_set_sh_desc(aead); 629 badkey: 630 memzero_explicit(&keys, sizeof(keys)); 631 return -EINVAL; 632 } 633 634 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key, 635 unsigned int keylen) 636 { 637 struct crypto_authenc_keys keys; 638 int err; 639 640 err = crypto_authenc_extractkeys(&keys, key, keylen); 641 if (unlikely(err)) 642 return err; 643 644 err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?: 645 aead_setkey(aead, key, keylen); 646 647 memzero_explicit(&keys, sizeof(keys)); 648 return err; 649 } 650 651 static int gcm_setkey(struct crypto_aead *aead, 652 const u8 *key, unsigned int keylen) 653 { 654 struct caam_ctx *ctx = crypto_aead_ctx(aead); 655 struct device *jrdev = ctx->jrdev; 656 int err; 657 658 err = aes_check_keylen(keylen); 659 if (err) 660 return err; 661 662 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 663 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 664 665 memcpy(ctx->key, key, keylen); 666 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir); 667 ctx->cdata.keylen = keylen; 668 669 return gcm_set_sh_desc(aead); 670 } 671 672 static int rfc4106_setkey(struct crypto_aead *aead, 673 const u8 *key, unsigned int keylen) 674 { 675 struct caam_ctx *ctx = crypto_aead_ctx(aead); 676 struct device *jrdev = ctx->jrdev; 677 int err; 678 679 err = aes_check_keylen(keylen - 4); 680 if (err) 681 return err; 682 683 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 684 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 685 686 memcpy(ctx->key, key, keylen); 687 688 /* 689 * The last four bytes of the key material are used as the salt value 690 * in the nonce. Update the AES key length. 
691 */ 692 ctx->cdata.keylen = keylen - 4; 693 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 694 ctx->dir); 695 return rfc4106_set_sh_desc(aead); 696 } 697 698 static int rfc4543_setkey(struct crypto_aead *aead, 699 const u8 *key, unsigned int keylen) 700 { 701 struct caam_ctx *ctx = crypto_aead_ctx(aead); 702 struct device *jrdev = ctx->jrdev; 703 int err; 704 705 err = aes_check_keylen(keylen - 4); 706 if (err) 707 return err; 708 709 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 710 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 711 712 memcpy(ctx->key, key, keylen); 713 714 /* 715 * The last four bytes of the key material are used as the salt value 716 * in the nonce. Update the AES key length. 717 */ 718 ctx->cdata.keylen = keylen - 4; 719 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 720 ctx->dir); 721 return rfc4543_set_sh_desc(aead); 722 } 723 724 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key, 725 unsigned int keylen, const u32 ctx1_iv_off) 726 { 727 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 728 struct caam_skcipher_alg *alg = 729 container_of(crypto_skcipher_alg(skcipher), typeof(*alg), 730 skcipher); 731 struct device *jrdev = ctx->jrdev; 732 unsigned int ivsize = crypto_skcipher_ivsize(skcipher); 733 u32 *desc; 734 const bool is_rfc3686 = alg->caam.rfc3686; 735 736 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 737 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 738 739 ctx->cdata.keylen = keylen; 740 ctx->cdata.key_virt = key; 741 ctx->cdata.key_inline = true; 742 743 /* skcipher_encrypt shared descriptor */ 744 desc = ctx->sh_desc_enc; 745 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686, 746 ctx1_iv_off); 747 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 748 desc_bytes(desc), ctx->dir); 749 750 /* skcipher_decrypt shared descriptor */ 751 desc = ctx->sh_desc_dec; 752 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686, 753 ctx1_iv_off); 754 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 755 desc_bytes(desc), ctx->dir); 756 757 return 0; 758 } 759 760 static int aes_skcipher_setkey(struct crypto_skcipher *skcipher, 761 const u8 *key, unsigned int keylen) 762 { 763 int err; 764 765 err = aes_check_keylen(keylen); 766 if (err) 767 return err; 768 769 return skcipher_setkey(skcipher, key, keylen, 0); 770 } 771 772 static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher, 773 const u8 *key, unsigned int keylen) 774 { 775 u32 ctx1_iv_off; 776 int err; 777 778 /* 779 * RFC3686 specific: 780 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER} 781 * | *key = {KEY, NONCE} 782 */ 783 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 784 keylen -= CTR_RFC3686_NONCE_SIZE; 785 786 err = aes_check_keylen(keylen); 787 if (err) 788 return err; 789 790 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off); 791 } 792 793 static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher, 794 const u8 *key, unsigned int keylen) 795 { 796 u32 ctx1_iv_off; 797 int err; 798 799 /* 800 * AES-CTR needs to load IV in CONTEXT1 reg 801 * at an offset of 128bits (16bytes) 802 * CONTEXT1[255:128] = IV 803 */ 804 ctx1_iv_off = 16; 805 806 err = aes_check_keylen(keylen); 807 if (err) 808 return err; 809 810 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off); 811 } 812 813 static int des_skcipher_setkey(struct crypto_skcipher *skcipher, 814 const u8 *key, unsigned int keylen) 815 { 816 return verify_skcipher_des_key(skcipher, key) ?: 817 

static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen, const u32 ctx1_iv_off)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	const bool is_rfc3686 = alg->caam.rfc3686;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, 0);
}

static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
				   const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
	keylen -= CTR_RFC3686_NONCE_SIZE;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	ctx1_iv_off = 16;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}

static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
				const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des3_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}

static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		dev_dbg(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}
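
/*
 * The extended descriptors below live in a single allocation:
 *
 *	[ struct *_edesc | h/w job descriptor | sec4 S/G table (| IV copy) ]
 *
 * with the trailing IV copy used only by skcipher (see
 * skcipher_edesc_alloc()); hw_desc[] is the handle into that tail storage.
 */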

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @bklog: stored to determine if the request needs backlog
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	bool bklog;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}
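
/*
 * Job completion callbacks, invoked once the CAAM has dequeued the job.
 * Requests that went through crypto-engine (bklog set) must be finalized
 * through the engine; all others complete directly.
 */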
1028 */ 1029 if (!has_bklog) 1030 skcipher_request_complete(req, ecode); 1031 else 1032 crypto_finalize_skcipher_request(jrp->engine, req, ecode); 1033 } 1034 1035 /* 1036 * Fill in aead job descriptor 1037 */ 1038 static void init_aead_job(struct aead_request *req, 1039 struct aead_edesc *edesc, 1040 bool all_contig, bool encrypt) 1041 { 1042 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1043 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1044 int authsize = ctx->authsize; 1045 u32 *desc = edesc->hw_desc; 1046 u32 out_options, in_options; 1047 dma_addr_t dst_dma, src_dma; 1048 int len, sec4_sg_index = 0; 1049 dma_addr_t ptr; 1050 u32 *sh_desc; 1051 1052 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 1053 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 1054 1055 len = desc_len(sh_desc); 1056 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1057 1058 if (all_contig) { 1059 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) : 1060 0; 1061 in_options = 0; 1062 } else { 1063 src_dma = edesc->sec4_sg_dma; 1064 sec4_sg_index += edesc->mapped_src_nents; 1065 in_options = LDST_SGF; 1066 } 1067 1068 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, 1069 in_options); 1070 1071 dst_dma = src_dma; 1072 out_options = in_options; 1073 1074 if (unlikely(req->src != req->dst)) { 1075 if (!edesc->mapped_dst_nents) { 1076 dst_dma = 0; 1077 out_options = 0; 1078 } else if (edesc->mapped_dst_nents == 1) { 1079 dst_dma = sg_dma_address(req->dst); 1080 out_options = 0; 1081 } else { 1082 dst_dma = edesc->sec4_sg_dma + 1083 sec4_sg_index * 1084 sizeof(struct sec4_sg_entry); 1085 out_options = LDST_SGF; 1086 } 1087 } 1088 1089 if (encrypt) 1090 append_seq_out_ptr(desc, dst_dma, 1091 req->assoclen + req->cryptlen + authsize, 1092 out_options); 1093 else 1094 append_seq_out_ptr(desc, dst_dma, 1095 req->assoclen + req->cryptlen - authsize, 1096 out_options); 1097 } 1098 1099 static void init_gcm_job(struct aead_request *req, 1100 struct aead_edesc *edesc, 1101 bool all_contig, bool encrypt) 1102 { 1103 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1104 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1105 unsigned int ivsize = crypto_aead_ivsize(aead); 1106 u32 *desc = edesc->hw_desc; 1107 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE); 1108 unsigned int last; 1109 1110 init_aead_job(req, edesc, all_contig, encrypt); 1111 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1112 1113 /* BUG This should not be specific to generic GCM. 

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
						    0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->mapped_dst_nents) {
			dst_dma = 0;
			out_options = 0;
		} else if (edesc->mapped_dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}

static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12-byte nonce in a single operation.
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}
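
/*
 * For IV-generating (geniv) transforms the shared descriptor itself
 * produces/consumes the IV as part of the sequence, so init_authenc_job()
 * loads the request IV as an immediate only for non-geniv transforms
 * (plus the RFC3686 encrypt case, which must seed the counter block).
 */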
1199 */ 1200 if (ctrlpriv->era < 3) 1201 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1202 else 1203 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen); 1204 1205 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv)) 1206 append_load_as_imm(desc, req->iv, ivsize, 1207 LDST_CLASS_1_CCB | 1208 LDST_SRCDST_BYTE_CONTEXT | 1209 (ivoffset << LDST_OFFSET_SHIFT)); 1210 } 1211 1212 /* 1213 * Fill in skcipher job descriptor 1214 */ 1215 static void init_skcipher_job(struct skcipher_request *req, 1216 struct skcipher_edesc *edesc, 1217 const bool encrypt) 1218 { 1219 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1220 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1221 struct device *jrdev = ctx->jrdev; 1222 int ivsize = crypto_skcipher_ivsize(skcipher); 1223 u32 *desc = edesc->hw_desc; 1224 u32 *sh_desc; 1225 u32 in_options = 0, out_options = 0; 1226 dma_addr_t src_dma, dst_dma, ptr; 1227 int len, sec4_sg_index = 0; 1228 1229 print_hex_dump_debug("presciv@"__stringify(__LINE__)": ", 1230 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1); 1231 dev_dbg(jrdev, "asked=%d, cryptlen%d\n", 1232 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); 1233 1234 caam_dump_sg("src @" __stringify(__LINE__)": ", 1235 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1236 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); 1237 1238 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 1239 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 1240 1241 len = desc_len(sh_desc); 1242 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1243 1244 if (ivsize || edesc->mapped_src_nents > 1) { 1245 src_dma = edesc->sec4_sg_dma; 1246 sec4_sg_index = edesc->mapped_src_nents + !!ivsize; 1247 in_options = LDST_SGF; 1248 } else { 1249 src_dma = sg_dma_address(req->src); 1250 } 1251 1252 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options); 1253 1254 if (likely(req->src == req->dst)) { 1255 dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry); 1256 out_options = in_options; 1257 } else if (!ivsize && edesc->mapped_dst_nents == 1) { 1258 dst_dma = sg_dma_address(req->dst); 1259 } else { 1260 dst_dma = edesc->sec4_sg_dma + sec4_sg_index * 1261 sizeof(struct sec4_sg_entry); 1262 out_options = LDST_SGF; 1263 } 1264 1265 append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options); 1266 } 1267 1268 /* 1269 * allocate and map the aead extended descriptor 1270 */ 1271 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req, 1272 int desc_bytes, bool *all_contig_ptr, 1273 bool encrypt) 1274 { 1275 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1276 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1277 struct device *jrdev = ctx->jrdev; 1278 struct caam_aead_req_ctx *rctx = aead_request_ctx(req); 1279 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1280 GFP_KERNEL : GFP_ATOMIC; 1281 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1282 int src_len, dst_len = 0; 1283 struct aead_edesc *edesc; 1284 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes; 1285 unsigned int authsize = ctx->authsize; 1286 1287 if (unlikely(req->dst != req->src)) { 1288 src_len = req->assoclen + req->cryptlen; 1289 dst_len = src_len + (encrypt ? 

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	int src_len, dst_len = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_len = req->assoclen + req->cryptlen;
		dst_len = src_len + (encrypt ? authsize : (-authsize));

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, dst_len);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				dst_len);
			return ERR_PTR(dst_nents);
		}
	} else {
		src_len = req->assoclen + req->cryptlen +
			  (encrypt ? authsize : 0);

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover also the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	/*
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries.
	 */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	if (mapped_dst_nents > 1)
		sec4_sg_len += pad_sg_nents(mapped_dst_nents);
	else
		sec4_sg_len = pad_sg_nents(sec4_sg_len);

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;

	rctx->edesc = edesc;

	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, src_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, dst_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}
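
/*
 * Enqueue helper: per the async crypto API convention, -EINPROGRESS (or
 * -EBUSY for a backlogged request) means the job was accepted; any other
 * return means it was not queued, so the edesc is unmapped and freed here.
 */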

static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
{
	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	struct aead_edesc *edesc = rctx->edesc;
	u32 *desc = edesc->hw_desc;
	int ret;

	/*
	 * Only backlog requests are sent to crypto-engine since the others
	 * can be handled by CAAM, if free, especially since JR has up to 1024
	 * entries (more than the 10 entries from crypto-engine).
	 */
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
							     req);
	else
		ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);

	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
		aead_unmap(jrdev, edesc, req);
		kfree(rctx->edesc);
	}

	return ret;
}
1424 */ 1425 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG) 1426 ret = crypto_transfer_aead_request_to_engine(jrpriv->engine, 1427 req); 1428 else 1429 ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req); 1430 1431 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) { 1432 aead_unmap(jrdev, edesc, req); 1433 kfree(rctx->edesc); 1434 } 1435 1436 return ret; 1437 } 1438 1439 static inline int chachapoly_crypt(struct aead_request *req, bool encrypt) 1440 { 1441 struct aead_edesc *edesc; 1442 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1443 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1444 struct device *jrdev = ctx->jrdev; 1445 bool all_contig; 1446 u32 *desc; 1447 1448 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig, 1449 encrypt); 1450 if (IS_ERR(edesc)) 1451 return PTR_ERR(edesc); 1452 1453 desc = edesc->hw_desc; 1454 1455 init_chachapoly_job(req, edesc, all_contig, encrypt); 1456 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ", 1457 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1458 1); 1459 1460 return aead_enqueue_req(jrdev, req); 1461 } 1462 1463 static int chachapoly_encrypt(struct aead_request *req) 1464 { 1465 return chachapoly_crypt(req, true); 1466 } 1467 1468 static int chachapoly_decrypt(struct aead_request *req) 1469 { 1470 return chachapoly_crypt(req, false); 1471 } 1472 1473 static inline int aead_crypt(struct aead_request *req, bool encrypt) 1474 { 1475 struct aead_edesc *edesc; 1476 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1477 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1478 struct device *jrdev = ctx->jrdev; 1479 bool all_contig; 1480 1481 /* allocate extended descriptor */ 1482 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1483 &all_contig, encrypt); 1484 if (IS_ERR(edesc)) 1485 return PTR_ERR(edesc); 1486 1487 /* Create and submit job descriptor */ 1488 init_authenc_job(req, edesc, all_contig, encrypt); 1489 1490 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ", 1491 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1492 desc_bytes(edesc->hw_desc), 1); 1493 1494 return aead_enqueue_req(jrdev, req); 1495 } 1496 1497 static int aead_encrypt(struct aead_request *req) 1498 { 1499 return aead_crypt(req, true); 1500 } 1501 1502 static int aead_decrypt(struct aead_request *req) 1503 { 1504 return aead_crypt(req, false); 1505 } 1506 1507 static int aead_do_one_req(struct crypto_engine *engine, void *areq) 1508 { 1509 struct aead_request *req = aead_request_cast(areq); 1510 struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 1511 struct caam_aead_req_ctx *rctx = aead_request_ctx(req); 1512 u32 *desc = rctx->edesc->hw_desc; 1513 int ret; 1514 1515 rctx->edesc->bklog = true; 1516 1517 ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req); 1518 1519 if (ret != -EINPROGRESS) { 1520 aead_unmap(ctx->jrdev, rctx->edesc, req); 1521 kfree(rctx->edesc); 1522 } else { 1523 ret = 0; 1524 } 1525 1526 return ret; 1527 } 1528 1529 static inline int gcm_crypt(struct aead_request *req, bool encrypt) 1530 { 1531 struct aead_edesc *edesc; 1532 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1533 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1534 struct device *jrdev = ctx->jrdev; 1535 bool all_contig; 1536 1537 /* allocate extended descriptor */ 1538 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, 1539 encrypt); 1540 if (IS_ERR(edesc)) 1541 return PTR_ERR(edesc); 1542 1543 /* Create and submit job descriptor */ 1544 init_gcm_job(req, edesc, all_contig, encrypt); 1545 

static int aead_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = aead_request_cast(areq);
	struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;

	ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);

	if (ret != -EINPROGRESS) {
		aead_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;
	}

	return ret;
}

static inline int gcm_crypt(struct aead_request *req, bool encrypt)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
				 encrypt);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, encrypt);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	return aead_enqueue_req(jrdev, req);
}

static int gcm_encrypt(struct aead_request *req)
{
	return gcm_crypt(req, true);
}

static int gcm_decrypt(struct aead_request *req)
{
	return gcm_crypt(req, false);
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
}
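
/*
 * Note the IV handling in skcipher_edesc_alloc() below: req->iv is not
 * guaranteed to be DMA-able (it may live on the caller's stack), so a copy
 * is placed in the edesc tail and mapped bidirectionally - the CAAM writes
 * the output IV back into the same slot.
 */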

/*
 * allocate and map the skcipher extended descriptor
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	if (!ivsize && mapped_src_nents == 1)
		sec4_sg_ents = 0; /* no need for an input hw s/g table */
	else
		sec4_sg_ents = mapped_src_nents + !!ivsize;
	dst_sg_idx = sec4_sg_ents;

	/*
	 * Input, output HW S/G tables: [IV, src][dst, IV]
	 * IV entries point to the same buffer
	 * If src == dst, S/G entries are reused (S/G tables overlap)
	 *
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries. Logic:
	 * if (output S/G)
	 *	pad output S/G, if needed
	 * else if (input S/G) ...
	 *	pad input S/G, if needed
	 */
	if (ivsize || mapped_dst_nents > 1) {
		if (req->src == req->dst)
			sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
		else
			sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
						     !!ivsize);
	} else {
		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
	}

	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);
	rctx->edesc = edesc;

	/* Make sure IV is located in a DMAable area */
	if (ivsize) {
		iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
		if (dma_mapping_error(jrdev, iv_dma)) {
			dev_err(jrdev, "unable to map IV\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	}
	if (dst_sg_idx)
		sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
			      !!ivsize, 0);

	if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
		sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
			      dst_sg_idx, 0);

	if (ivsize)
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
				   mapped_dst_nents, iv_dma, ivsize, 0);

	if (ivsize || mapped_dst_nents > 1)
		sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
				    mapped_dst_nents - 1 + !!ivsize);

	if (sec4_sg_bytes) {
		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
						    sec4_sg_bytes,
						    DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
			dev_err(jrdev, "unable to map S/G table\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, iv_dma, ivsize, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->iv_dma = iv_dma;

	print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
			     sec4_sg_bytes, 1);

	return edesc;
}

static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
	u32 *desc = rctx->edesc->hw_desc;
	int ret;

	rctx->edesc->bklog = true;

	ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);

	if (ret != -EINPROGRESS) {
		skcipher_unmap(ctx->jrdev, rctx->edesc, req);
		kfree(rctx->edesc);
	} else {
		ret = 0;
	}

	return ret;
}

static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
	u32 *desc;
	int ret = 0;

	if (!req->cryptlen)
		return 0;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_skcipher_job(req, edesc, encrypt);

	print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	desc = edesc->hw_desc;
	/*
	 * Only backlog requests are sent to crypto-engine since the others
	 * can be handled by CAAM, if free, especially since JR has up to 1024
	 * entries (more than the 10 entries from crypto-engine).
	 */
	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
		ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
								 req);
	else
		ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);

	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int skcipher_encrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, true);
}

static int skcipher_decrypt(struct skcipher_request *req)
{
	return skcipher_crypt(req, false);
}
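
/*
 * Algorithm template tables: one entry per transform advertised by this
 * driver. The module init code walks these arrays, registers each
 * algorithm with the crypto API and sets ->registered, so that exit can
 * unregister exactly what was registered.
 */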
.cra_name = "rfc3686(ctr(aes))", 1884 .cra_driver_name = "rfc3686-ctr-aes-caam", 1885 .cra_blocksize = 1, 1886 }, 1887 .setkey = rfc3686_skcipher_setkey, 1888 .encrypt = skcipher_encrypt, 1889 .decrypt = skcipher_decrypt, 1890 .min_keysize = AES_MIN_KEY_SIZE + 1891 CTR_RFC3686_NONCE_SIZE, 1892 .max_keysize = AES_MAX_KEY_SIZE + 1893 CTR_RFC3686_NONCE_SIZE, 1894 .ivsize = CTR_RFC3686_IV_SIZE, 1895 .chunksize = AES_BLOCK_SIZE, 1896 }, 1897 .caam = { 1898 .class1_alg_type = OP_ALG_ALGSEL_AES | 1899 OP_ALG_AAI_CTR_MOD128, 1900 .rfc3686 = true, 1901 }, 1902 }, 1903 { 1904 .skcipher = { 1905 .base = { 1906 .cra_name = "xts(aes)", 1907 .cra_driver_name = "xts-aes-caam", 1908 .cra_blocksize = AES_BLOCK_SIZE, 1909 }, 1910 .setkey = xts_skcipher_setkey, 1911 .encrypt = skcipher_encrypt, 1912 .decrypt = skcipher_decrypt, 1913 .min_keysize = 2 * AES_MIN_KEY_SIZE, 1914 .max_keysize = 2 * AES_MAX_KEY_SIZE, 1915 .ivsize = AES_BLOCK_SIZE, 1916 }, 1917 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS, 1918 }, 1919 { 1920 .skcipher = { 1921 .base = { 1922 .cra_name = "ecb(des)", 1923 .cra_driver_name = "ecb-des-caam", 1924 .cra_blocksize = DES_BLOCK_SIZE, 1925 }, 1926 .setkey = des_skcipher_setkey, 1927 .encrypt = skcipher_encrypt, 1928 .decrypt = skcipher_decrypt, 1929 .min_keysize = DES_KEY_SIZE, 1930 .max_keysize = DES_KEY_SIZE, 1931 }, 1932 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB, 1933 }, 1934 { 1935 .skcipher = { 1936 .base = { 1937 .cra_name = "ecb(aes)", 1938 .cra_driver_name = "ecb-aes-caam", 1939 .cra_blocksize = AES_BLOCK_SIZE, 1940 }, 1941 .setkey = aes_skcipher_setkey, 1942 .encrypt = skcipher_encrypt, 1943 .decrypt = skcipher_decrypt, 1944 .min_keysize = AES_MIN_KEY_SIZE, 1945 .max_keysize = AES_MAX_KEY_SIZE, 1946 }, 1947 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB, 1948 }, 1949 { 1950 .skcipher = { 1951 .base = { 1952 .cra_name = "ecb(des3_ede)", 1953 .cra_driver_name = "ecb-des3-caam", 1954 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 1955 }, 1956 .setkey = des3_skcipher_setkey, 1957 .encrypt = skcipher_encrypt, 1958 .decrypt = skcipher_decrypt, 1959 .min_keysize = DES3_EDE_KEY_SIZE, 1960 .max_keysize = DES3_EDE_KEY_SIZE, 1961 }, 1962 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB, 1963 }, 1964 }; 1965 1966 static struct caam_aead_alg driver_aeads[] = { 1967 { 1968 .aead = { 1969 .base = { 1970 .cra_name = "rfc4106(gcm(aes))", 1971 .cra_driver_name = "rfc4106-gcm-aes-caam", 1972 .cra_blocksize = 1, 1973 }, 1974 .setkey = rfc4106_setkey, 1975 .setauthsize = rfc4106_setauthsize, 1976 .encrypt = ipsec_gcm_encrypt, 1977 .decrypt = ipsec_gcm_decrypt, 1978 .ivsize = GCM_RFC4106_IV_SIZE, 1979 .maxauthsize = AES_BLOCK_SIZE, 1980 }, 1981 .caam = { 1982 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 1983 .nodkp = true, 1984 }, 1985 }, 1986 { 1987 .aead = { 1988 .base = { 1989 .cra_name = "rfc4543(gcm(aes))", 1990 .cra_driver_name = "rfc4543-gcm-aes-caam", 1991 .cra_blocksize = 1, 1992 }, 1993 .setkey = rfc4543_setkey, 1994 .setauthsize = rfc4543_setauthsize, 1995 .encrypt = ipsec_gcm_encrypt, 1996 .decrypt = ipsec_gcm_decrypt, 1997 .ivsize = GCM_RFC4543_IV_SIZE, 1998 .maxauthsize = AES_BLOCK_SIZE, 1999 }, 2000 .caam = { 2001 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2002 .nodkp = true, 2003 }, 2004 }, 2005 /* Galois Counter Mode */ 2006 { 2007 .aead = { 2008 .base = { 2009 .cra_name = "gcm(aes)", 2010 .cra_driver_name = "gcm-aes-caam", 2011 .cra_blocksize = 1, 2012 }, 2013 .setkey = gcm_setkey, 2014 .setauthsize = gcm_setauthsize, 

static struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = GCM_RFC4106_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = GCM_RFC4543_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = gcm_encrypt,
			.decrypt = gcm_decrypt,
			.ivsize = GCM_AES_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
"authenc(hmac(md5),cbc(aes))", 2156 .cra_driver_name = "authenc-hmac-md5-" 2157 "cbc-aes-caam", 2158 .cra_blocksize = AES_BLOCK_SIZE, 2159 }, 2160 .setkey = aead_setkey, 2161 .setauthsize = aead_setauthsize, 2162 .encrypt = aead_encrypt, 2163 .decrypt = aead_decrypt, 2164 .ivsize = AES_BLOCK_SIZE, 2165 .maxauthsize = MD5_DIGEST_SIZE, 2166 }, 2167 .caam = { 2168 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2169 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2170 OP_ALG_AAI_HMAC_PRECOMP, 2171 }, 2172 }, 2173 { 2174 .aead = { 2175 .base = { 2176 .cra_name = "echainiv(authenc(hmac(md5)," 2177 "cbc(aes)))", 2178 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2179 "cbc-aes-caam", 2180 .cra_blocksize = AES_BLOCK_SIZE, 2181 }, 2182 .setkey = aead_setkey, 2183 .setauthsize = aead_setauthsize, 2184 .encrypt = aead_encrypt, 2185 .decrypt = aead_decrypt, 2186 .ivsize = AES_BLOCK_SIZE, 2187 .maxauthsize = MD5_DIGEST_SIZE, 2188 }, 2189 .caam = { 2190 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2191 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2192 OP_ALG_AAI_HMAC_PRECOMP, 2193 .geniv = true, 2194 }, 2195 }, 2196 { 2197 .aead = { 2198 .base = { 2199 .cra_name = "authenc(hmac(sha1),cbc(aes))", 2200 .cra_driver_name = "authenc-hmac-sha1-" 2201 "cbc-aes-caam", 2202 .cra_blocksize = AES_BLOCK_SIZE, 2203 }, 2204 .setkey = aead_setkey, 2205 .setauthsize = aead_setauthsize, 2206 .encrypt = aead_encrypt, 2207 .decrypt = aead_decrypt, 2208 .ivsize = AES_BLOCK_SIZE, 2209 .maxauthsize = SHA1_DIGEST_SIZE, 2210 }, 2211 .caam = { 2212 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2213 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2214 OP_ALG_AAI_HMAC_PRECOMP, 2215 }, 2216 }, 2217 { 2218 .aead = { 2219 .base = { 2220 .cra_name = "echainiv(authenc(hmac(sha1)," 2221 "cbc(aes)))", 2222 .cra_driver_name = "echainiv-authenc-" 2223 "hmac-sha1-cbc-aes-caam", 2224 .cra_blocksize = AES_BLOCK_SIZE, 2225 }, 2226 .setkey = aead_setkey, 2227 .setauthsize = aead_setauthsize, 2228 .encrypt = aead_encrypt, 2229 .decrypt = aead_decrypt, 2230 .ivsize = AES_BLOCK_SIZE, 2231 .maxauthsize = SHA1_DIGEST_SIZE, 2232 }, 2233 .caam = { 2234 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2235 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2236 OP_ALG_AAI_HMAC_PRECOMP, 2237 .geniv = true, 2238 }, 2239 }, 2240 { 2241 .aead = { 2242 .base = { 2243 .cra_name = "authenc(hmac(sha224),cbc(aes))", 2244 .cra_driver_name = "authenc-hmac-sha224-" 2245 "cbc-aes-caam", 2246 .cra_blocksize = AES_BLOCK_SIZE, 2247 }, 2248 .setkey = aead_setkey, 2249 .setauthsize = aead_setauthsize, 2250 .encrypt = aead_encrypt, 2251 .decrypt = aead_decrypt, 2252 .ivsize = AES_BLOCK_SIZE, 2253 .maxauthsize = SHA224_DIGEST_SIZE, 2254 }, 2255 .caam = { 2256 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2257 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2258 OP_ALG_AAI_HMAC_PRECOMP, 2259 }, 2260 }, 2261 { 2262 .aead = { 2263 .base = { 2264 .cra_name = "echainiv(authenc(hmac(sha224)," 2265 "cbc(aes)))", 2266 .cra_driver_name = "echainiv-authenc-" 2267 "hmac-sha224-cbc-aes-caam", 2268 .cra_blocksize = AES_BLOCK_SIZE, 2269 }, 2270 .setkey = aead_setkey, 2271 .setauthsize = aead_setauthsize, 2272 .encrypt = aead_encrypt, 2273 .decrypt = aead_decrypt, 2274 .ivsize = AES_BLOCK_SIZE, 2275 .maxauthsize = SHA224_DIGEST_SIZE, 2276 }, 2277 .caam = { 2278 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2279 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2280 OP_ALG_AAI_HMAC_PRECOMP, 2281 .geniv = true, 2282 }, 2283 }, 2284 { 2285 .aead = { 2286 .base = { 2287 .cra_name = 
"authenc(hmac(sha256),cbc(aes))", 2288 .cra_driver_name = "authenc-hmac-sha256-" 2289 "cbc-aes-caam", 2290 .cra_blocksize = AES_BLOCK_SIZE, 2291 }, 2292 .setkey = aead_setkey, 2293 .setauthsize = aead_setauthsize, 2294 .encrypt = aead_encrypt, 2295 .decrypt = aead_decrypt, 2296 .ivsize = AES_BLOCK_SIZE, 2297 .maxauthsize = SHA256_DIGEST_SIZE, 2298 }, 2299 .caam = { 2300 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2301 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2302 OP_ALG_AAI_HMAC_PRECOMP, 2303 }, 2304 }, 2305 { 2306 .aead = { 2307 .base = { 2308 .cra_name = "echainiv(authenc(hmac(sha256)," 2309 "cbc(aes)))", 2310 .cra_driver_name = "echainiv-authenc-" 2311 "hmac-sha256-cbc-aes-caam", 2312 .cra_blocksize = AES_BLOCK_SIZE, 2313 }, 2314 .setkey = aead_setkey, 2315 .setauthsize = aead_setauthsize, 2316 .encrypt = aead_encrypt, 2317 .decrypt = aead_decrypt, 2318 .ivsize = AES_BLOCK_SIZE, 2319 .maxauthsize = SHA256_DIGEST_SIZE, 2320 }, 2321 .caam = { 2322 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2323 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2324 OP_ALG_AAI_HMAC_PRECOMP, 2325 .geniv = true, 2326 }, 2327 }, 2328 { 2329 .aead = { 2330 .base = { 2331 .cra_name = "authenc(hmac(sha384),cbc(aes))", 2332 .cra_driver_name = "authenc-hmac-sha384-" 2333 "cbc-aes-caam", 2334 .cra_blocksize = AES_BLOCK_SIZE, 2335 }, 2336 .setkey = aead_setkey, 2337 .setauthsize = aead_setauthsize, 2338 .encrypt = aead_encrypt, 2339 .decrypt = aead_decrypt, 2340 .ivsize = AES_BLOCK_SIZE, 2341 .maxauthsize = SHA384_DIGEST_SIZE, 2342 }, 2343 .caam = { 2344 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2345 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2346 OP_ALG_AAI_HMAC_PRECOMP, 2347 }, 2348 }, 2349 { 2350 .aead = { 2351 .base = { 2352 .cra_name = "echainiv(authenc(hmac(sha384)," 2353 "cbc(aes)))", 2354 .cra_driver_name = "echainiv-authenc-" 2355 "hmac-sha384-cbc-aes-caam", 2356 .cra_blocksize = AES_BLOCK_SIZE, 2357 }, 2358 .setkey = aead_setkey, 2359 .setauthsize = aead_setauthsize, 2360 .encrypt = aead_encrypt, 2361 .decrypt = aead_decrypt, 2362 .ivsize = AES_BLOCK_SIZE, 2363 .maxauthsize = SHA384_DIGEST_SIZE, 2364 }, 2365 .caam = { 2366 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2367 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2368 OP_ALG_AAI_HMAC_PRECOMP, 2369 .geniv = true, 2370 }, 2371 }, 2372 { 2373 .aead = { 2374 .base = { 2375 .cra_name = "authenc(hmac(sha512),cbc(aes))", 2376 .cra_driver_name = "authenc-hmac-sha512-" 2377 "cbc-aes-caam", 2378 .cra_blocksize = AES_BLOCK_SIZE, 2379 }, 2380 .setkey = aead_setkey, 2381 .setauthsize = aead_setauthsize, 2382 .encrypt = aead_encrypt, 2383 .decrypt = aead_decrypt, 2384 .ivsize = AES_BLOCK_SIZE, 2385 .maxauthsize = SHA512_DIGEST_SIZE, 2386 }, 2387 .caam = { 2388 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2389 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2390 OP_ALG_AAI_HMAC_PRECOMP, 2391 }, 2392 }, 2393 { 2394 .aead = { 2395 .base = { 2396 .cra_name = "echainiv(authenc(hmac(sha512)," 2397 "cbc(aes)))", 2398 .cra_driver_name = "echainiv-authenc-" 2399 "hmac-sha512-cbc-aes-caam", 2400 .cra_blocksize = AES_BLOCK_SIZE, 2401 }, 2402 .setkey = aead_setkey, 2403 .setauthsize = aead_setauthsize, 2404 .encrypt = aead_encrypt, 2405 .decrypt = aead_decrypt, 2406 .ivsize = AES_BLOCK_SIZE, 2407 .maxauthsize = SHA512_DIGEST_SIZE, 2408 }, 2409 .caam = { 2410 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2411 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2412 OP_ALG_AAI_HMAC_PRECOMP, 2413 .geniv = true, 2414 }, 2415 }, 2416 { 2417 .aead 
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
"hmac-sha512-cbc-des-caam", 2938 .cra_blocksize = DES_BLOCK_SIZE, 2939 }, 2940 .setkey = aead_setkey, 2941 .setauthsize = aead_setauthsize, 2942 .encrypt = aead_encrypt, 2943 .decrypt = aead_decrypt, 2944 .ivsize = DES_BLOCK_SIZE, 2945 .maxauthsize = SHA512_DIGEST_SIZE, 2946 }, 2947 .caam = { 2948 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2949 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2950 OP_ALG_AAI_HMAC_PRECOMP, 2951 .geniv = true, 2952 }, 2953 }, 2954 { 2955 .aead = { 2956 .base = { 2957 .cra_name = "authenc(hmac(md5)," 2958 "rfc3686(ctr(aes)))", 2959 .cra_driver_name = "authenc-hmac-md5-" 2960 "rfc3686-ctr-aes-caam", 2961 .cra_blocksize = 1, 2962 }, 2963 .setkey = aead_setkey, 2964 .setauthsize = aead_setauthsize, 2965 .encrypt = aead_encrypt, 2966 .decrypt = aead_decrypt, 2967 .ivsize = CTR_RFC3686_IV_SIZE, 2968 .maxauthsize = MD5_DIGEST_SIZE, 2969 }, 2970 .caam = { 2971 .class1_alg_type = OP_ALG_ALGSEL_AES | 2972 OP_ALG_AAI_CTR_MOD128, 2973 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2974 OP_ALG_AAI_HMAC_PRECOMP, 2975 .rfc3686 = true, 2976 }, 2977 }, 2978 { 2979 .aead = { 2980 .base = { 2981 .cra_name = "seqiv(authenc(" 2982 "hmac(md5),rfc3686(ctr(aes))))", 2983 .cra_driver_name = "seqiv-authenc-hmac-md5-" 2984 "rfc3686-ctr-aes-caam", 2985 .cra_blocksize = 1, 2986 }, 2987 .setkey = aead_setkey, 2988 .setauthsize = aead_setauthsize, 2989 .encrypt = aead_encrypt, 2990 .decrypt = aead_decrypt, 2991 .ivsize = CTR_RFC3686_IV_SIZE, 2992 .maxauthsize = MD5_DIGEST_SIZE, 2993 }, 2994 .caam = { 2995 .class1_alg_type = OP_ALG_ALGSEL_AES | 2996 OP_ALG_AAI_CTR_MOD128, 2997 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2998 OP_ALG_AAI_HMAC_PRECOMP, 2999 .rfc3686 = true, 3000 .geniv = true, 3001 }, 3002 }, 3003 { 3004 .aead = { 3005 .base = { 3006 .cra_name = "authenc(hmac(sha1)," 3007 "rfc3686(ctr(aes)))", 3008 .cra_driver_name = "authenc-hmac-sha1-" 3009 "rfc3686-ctr-aes-caam", 3010 .cra_blocksize = 1, 3011 }, 3012 .setkey = aead_setkey, 3013 .setauthsize = aead_setauthsize, 3014 .encrypt = aead_encrypt, 3015 .decrypt = aead_decrypt, 3016 .ivsize = CTR_RFC3686_IV_SIZE, 3017 .maxauthsize = SHA1_DIGEST_SIZE, 3018 }, 3019 .caam = { 3020 .class1_alg_type = OP_ALG_ALGSEL_AES | 3021 OP_ALG_AAI_CTR_MOD128, 3022 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3023 OP_ALG_AAI_HMAC_PRECOMP, 3024 .rfc3686 = true, 3025 }, 3026 }, 3027 { 3028 .aead = { 3029 .base = { 3030 .cra_name = "seqiv(authenc(" 3031 "hmac(sha1),rfc3686(ctr(aes))))", 3032 .cra_driver_name = "seqiv-authenc-hmac-sha1-" 3033 "rfc3686-ctr-aes-caam", 3034 .cra_blocksize = 1, 3035 }, 3036 .setkey = aead_setkey, 3037 .setauthsize = aead_setauthsize, 3038 .encrypt = aead_encrypt, 3039 .decrypt = aead_decrypt, 3040 .ivsize = CTR_RFC3686_IV_SIZE, 3041 .maxauthsize = SHA1_DIGEST_SIZE, 3042 }, 3043 .caam = { 3044 .class1_alg_type = OP_ALG_ALGSEL_AES | 3045 OP_ALG_AAI_CTR_MOD128, 3046 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3047 OP_ALG_AAI_HMAC_PRECOMP, 3048 .rfc3686 = true, 3049 .geniv = true, 3050 }, 3051 }, 3052 { 3053 .aead = { 3054 .base = { 3055 .cra_name = "authenc(hmac(sha224)," 3056 "rfc3686(ctr(aes)))", 3057 .cra_driver_name = "authenc-hmac-sha224-" 3058 "rfc3686-ctr-aes-caam", 3059 .cra_blocksize = 1, 3060 }, 3061 .setkey = aead_setkey, 3062 .setauthsize = aead_setauthsize, 3063 .encrypt = aead_encrypt, 3064 .decrypt = aead_decrypt, 3065 .ivsize = CTR_RFC3686_IV_SIZE, 3066 .maxauthsize = SHA224_DIGEST_SIZE, 3067 }, 3068 .caam = { 3069 .class1_alg_type = OP_ALG_ALGSEL_AES | 3070 OP_ALG_AAI_CTR_MOD128, 3071 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539(chacha20,poly1305)",
				.cra_driver_name = "rfc7539-chacha20-poly1305-"
						   "caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = CHACHAPOLY_IV_SIZE,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539esp(chacha20,poly1305)",
				.cra_driver_name = "rfc7539esp-chacha20-"
						   "poly1305-caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = 8,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
};
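
/*
 * Illustrative AEAD consumer sketch (not part of this driver): sealing a
 * buffer with "gcm(aes)" as registered in driver_aeads above. Function
 * and buffer names are hypothetical; needs <crypto/aead.h>,
 * <crypto/gcm.h> and <linux/scatterlist.h>. buf must be DMA-able and
 * laid out as: associated data || plaintext || room for the 16-byte tag.
 */
static int example_gcm_aes_seal(const u8 *key, unsigned int keylen,
				u8 *buf, unsigned int assoclen,
				unsigned int len, u8 iv[GCM_AES_IV_SIZE])
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_aead_setkey(tfm, key, keylen);
	if (ret)
		goto out_free_tfm;

	ret = crypto_aead_setauthsize(tfm, 16);
	if (ret)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	/* one scatterlist spanning AD, plaintext and tag space, in place */
	sg_init_one(&sg, buf, assoclen + len + 16);
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				  CRYPTO_TFM_REQ_MAY_SLEEP,
				  crypto_req_done, &wait);
	aead_request_set_crypt(req, &sg, &sg, len, iv);
	aead_request_set_ad(req, assoclen);

	ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return ret;
}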

static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;
	const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
						   sh_desc_enc);

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma) -
					sh_desc_enc_offset,
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec) -
					  sh_desc_enc_offset;
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
				  sh_desc_enc_offset;

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}

static int caam_cra_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher);
	struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));

	ctx->enginectx.op.do_one_request = skcipher_do_one_req;

	return caam_init_common(ctx, &caam_alg->caam, false);
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));

	ctx->enginectx.op.do_one_request = aead_do_one_req;

	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
}

static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma) -
			       offsetof(struct caam_ctx, sh_desc_enc),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	caam_exit_common(crypto_skcipher_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}

void caam_algapi_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;

		if (t_alg->registered)
			crypto_unregister_skcipher(&t_alg->skcipher);
	}
}

static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
{
	struct skcipher_alg *alg = &t_alg->skcipher;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
			      CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_cra_init;
	alg->exit = caam_cra_exit;
}

static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
			      CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
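
/*
 * Illustrative note (not in the original source): caam_init_common()
 * above maps sh_desc_enc, sh_desc_dec and key with a single
 * dma_map_single_attrs() call. This works because the three members are
 * laid out contiguously in struct caam_ctx, so the decrypt-descriptor
 * and key DMA addresses are derived as plain offsetof() deltas from the
 * encrypt-descriptor address; caam_exit_common() undoes the one mapping.
 */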

int caam_algapi_init(struct device *ctrldev)
{
	struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false, gcm_support;

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	if (priv->era < 10) {
		u32 cha_vid, cha_inst, aes_rn;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
		ccha_inst = 0;
		ptha_inst = 0;

		/* GCM is missing only on LP AES blocks older than rev 8 */
		aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
			 CHA_ID_LS_AES_MASK;
		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
	} else {
		u32 aesa, mdha;

		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
		ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
		ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;

		/* Era 10+ devices advertise GCM support explicitly */
		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
	}

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		    OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/* Skip GCM algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
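
/*
 * Illustrative check (not part of this driver): once caam_algapi_init()
 * has run, a consumer can confirm that the CAAM provider won the
 * priority-based selection for a given algorithm name. The function name
 * is hypothetical; needs <crypto/skcipher.h> and <linux/string.h>.
 */
static bool example_caam_backs_cbc_aes(void)
{
	struct crypto_skcipher *tfm;
	bool is_caam;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return false;

	/* "cbc-aes-caam" is the driver name registered in driver_algs */
	is_caam = !strcmp(crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm)),
			  "cbc-aes-caam");

	crypto_free_skcipher(tfm);
	return is_caam;
}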