// SPDX-License-Identifier: GPL-2.0+
/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016-2019 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->| ShareDesc   |
 * | *(packet 1) |                     | (PDB)       |
 * ---------------      |------------->| (hashKey)   |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place.  So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with.  Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */

#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

struct caam_alg_entry {
	int class1_alg_type;
	int class2_alg_type;
	bool rfc3686;
	bool geniv;
	bool nodkp;
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];
	u32 sh_desc_dec[DESC_MAX_USED_LEN];
	u8 key[CAAM_MAX_KEY_SIZE];
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;
	struct device *jrdev;
	struct alginfo adata;
	struct alginfo cdata;
	unsigned int authsize;
};

static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
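	/*
	 * rem_bytes is the descriptor space left after the job descriptor
	 * I/O commands are accounted for: if the split key still fits, it
	 * is inlined as immediate data, otherwise it is referenced through
	 * its DMA address.
	 */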
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	/*
	 * In case |user key| > |derived key|, using DKP<imm,imm>
	 * would result in invalid opcodes (last bytes of user key) in
	 * the resulting descriptor. Use DKP<ptr,imm> instead => both
	 * virtual and dma key addresses are needed.
	 */
	ctx->adata.key_virt = ctx->key;
	ctx->adata.key_dma = ctx->key_dma;

	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ?
			       DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}

static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
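		/* key does not fit inline; refer to it by its DMA address */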
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
	int err;

	err = crypto_gcm_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	if (authsize != 16)
		return -EINVAL;

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

static int chachapoly_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, true, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, false, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int chachapoly_setauthsize(struct crypto_aead *aead,
				  unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);

	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;

	ctx->authsize = authsize;
	return chachapoly_set_sh_desc(aead);
}

static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;

	if (keylen != CHACHA_KEY_SIZE + saltlen)
		return -EINVAL;

	ctx->cdata.key_virt = key;
	ctx->cdata.keylen = keylen - saltlen;

	return chachapoly_set_sh_desc(aead);
}

static int aead_setkey(struct crypto_aead *aead,
		       const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
		keys.authkeylen + keys.enckeylen, keys.enckeylen,
		keys.authkeylen);
	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret)
		goto badkey;

	/* append encryption key after the auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);

	print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
			     ctx->adata.keylen_pad + keys.enckeylen, 1);

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		return err;

	err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
	      aead_setkey(aead, key, keylen);

	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen - 4);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}

static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	int err;

	err = aes_check_keylen(keylen - 4);
	if (err)
		return err;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}

static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen, const u32 ctx1_iv_off)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	const bool is_rfc3686 = alg->caam.rfc3686;

	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	int err;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, 0);
}

static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
				   const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
	keylen -= CTR_RFC3686_NONCE_SIZE;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	u32 ctx1_iv_off;
	int err;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	ctx1_iv_off = 16;

	err = aes_check_keylen(keylen);
	if (err)
		return err;

	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
}

static int arc4_skcipher_setkey(struct crypto_skcipher *skcipher,
				const u8 *key, unsigned int keylen)
{
	return skcipher_setkey(skcipher, key, keylen, 0);
}

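/*
 * The DES and 3DES setkey handlers below run the generic key checks
 * (verify_skcipher_des_key() / verify_skcipher_des3_key()) before the
 * shared descriptors are rebuilt with the new key.
 */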
static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}

static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
				const u8 *key, unsigned int keylen)
{
	return verify_skcipher_des3_key(skcipher, key) ?:
	       skcipher_setkey(skcipher, key, keylen, 0);
}

static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
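		/* src == dst: the single mapping was bidirectional */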
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;
	int ecode = 0;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, ecode);
}

static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;
	int ecode = 0;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, ecode);
}

static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ecode = 0;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);

	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block (CBC mode) or last counter (CTR mode).
	 * This is used e.g. by the CTS mode.
	 */
	if (ivsize && !ecode) {
		memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
		       ivsize);
		print_hex_dump_debug("dstiv @"__stringify(__LINE__)": ",
				     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
				     edesc->src_nents > 1 ? 100 : ivsize, 1);
	}

	caam_dump_sg("dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ?
		     100 : req->cryptlen, 1);

	kfree(edesc);

	skcipher_request_complete(req, ecode);
}

static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int ecode = 0;

	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
	if (err)
		ecode = caam_jr_strstatus(jrdev, err);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block (CBC mode) or last counter (CTR mode).
	 * This is used e.g. by the CTS mode.
	 */
	if (ivsize && !ecode) {
		memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
		       ivsize);

		print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
				     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
				     ivsize, 1);
	}

	caam_dump_sg("dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	kfree(edesc);

	skcipher_request_complete(req, ecode);
}

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		src_dma = edesc->mapped_src_nents ?
			  sg_dma_address(req->src) : 0;
		in_options = 0;
	} else {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->mapped_dst_nents) {
			dst_dma = 0;
			out_options = 0;
		} else if (edesc->mapped_dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}

static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12-byte nonce in a single operation
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}

static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in skcipher job descriptor
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 in_options = 0, out_options = 0;
	dma_addr_t src_dma, dst_dma, ptr;
	int len, sec4_sg_index = 0;

	print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	dev_dbg(jrdev, "asked=%d, cryptlen=%d\n",
		(int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);

	caam_dump_sg("src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ?
	      ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (ivsize || edesc->mapped_src_nents > 1) {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
		in_options = LDST_SGF;
	} else {
		src_dma = sg_dma_address(req->src);
	}

	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
		out_options = in_options;
	} else if (!ivsize && edesc->mapped_dst_nents == 1) {
		dst_dma = sg_dma_address(req->dst);
	} else {
		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
			  sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}

	append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	int src_len, dst_len = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_len = req->assoclen + req->cryptlen;
		dst_len = src_len + (encrypt ? authsize : (-authsize));

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}

		dst_nents = sg_nents_for_len(req->dst, dst_len);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				dst_len);
			return ERR_PTR(dst_nents);
		}
	} else {
		src_len = req->assoclen + req->cryptlen +
			  (encrypt ?
			   authsize : 0);

		src_nents = sg_nents_for_len(req->src, src_len);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				src_len);
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover also the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	/*
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries.
	 */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	if (mapped_dst_nents > 1)
		sec4_sg_len += pad_sg_nents(mapped_dst_nents);
	else
		sec4_sg_len = pad_sg_nents(sec4_sg_len);

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, src_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, dst_len,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}

static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

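	/*
	 * On success the request completes asynchronously: the
	 * aead_encrypt_done() callback unmaps and frees the extended
	 * descriptor.
	 */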
	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int chachapoly_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, true);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int chachapoly_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, false);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?
	       : gcm_encrypt(req);
}

static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, false);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

static int ipsec_gcm_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?
	       : gcm_decrypt(req);
}

static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	caam_dump_sg("dec src@" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     req->assoclen + req->cryptlen, 1);

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, false);

	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
			     desc_bytes(edesc->hw_desc), 1);

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map the skcipher extended descriptor for skcipher
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	if (!ivsize && mapped_src_nents == 1)
		sec4_sg_ents = 0; // no need for an input hw s/g table
	else
		sec4_sg_ents = mapped_src_nents + !!ivsize;
	dst_sg_idx = sec4_sg_ents;

	/*
	 * Input, output HW S/G tables: [IV, src][dst, IV]
	 * IV entries point to the same buffer
	 * If src == dst, S/G entries are reused (S/G tables overlap)
	 *
	 * HW reads 4 S/G entries at a time;
	 * make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries. Logic:
	 * if (output S/G)
	 *	pad output S/G, if needed
	 * else if (input S/G) ...
	 *	pad input S/G, if needed
	 */
	if (ivsize || mapped_dst_nents > 1) {
		if (req->src == req->dst)
			sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
		else
			sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
						     !!ivsize);
	} else {
		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
	}

	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);

	/* Make sure IV is located in a DMAable area */
	if (ivsize) {
		iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
		if (dma_mapping_error(jrdev, iv_dma)) {
			dev_err(jrdev, "unable to map IV\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	}
	if (dst_sg_idx)
		sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
			      !!ivsize, 0);

	if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
		sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
			      dst_sg_idx, 0);

	if (ivsize)
		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
				   mapped_dst_nents, iv_dma, ivsize, 0);

	if (ivsize || mapped_dst_nents > 1)
		sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
				    mapped_dst_nents);

	if (sec4_sg_bytes) {
		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
						    sec4_sg_bytes,
						    DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
			dev_err(jrdev, "unable to map S/G table\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, iv_dma, ivsize, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->iv_dma = iv_dma;

	print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
			     sec4_sg_bytes, 1);

	return edesc;
}

static int skcipher_encrypt(struct skcipher_request *req)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	if (!req->cryptlen)
		return 0;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_skcipher_job(req, edesc, true);

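	/*
	 * The job descriptor now carries the shared-descriptor pointer and
	 * the SEQ IN/OUT pointers (the IV, if any, is referenced through
	 * the sec4 S/G table); dump it before handing it to the job ring.
	 */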
print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ", 1839 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1840 desc_bytes(edesc->hw_desc), 1); 1841 1842 desc = edesc->hw_desc; 1843 ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req); 1844 1845 if (!ret) { 1846 ret = -EINPROGRESS; 1847 } else { 1848 skcipher_unmap(jrdev, edesc, req); 1849 kfree(edesc); 1850 } 1851 1852 return ret; 1853 } 1854 1855 static int skcipher_decrypt(struct skcipher_request *req) 1856 { 1857 struct skcipher_edesc *edesc; 1858 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1859 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1860 struct device *jrdev = ctx->jrdev; 1861 u32 *desc; 1862 int ret = 0; 1863 1864 if (!req->cryptlen) 1865 return 0; 1866 1867 /* allocate extended descriptor */ 1868 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ); 1869 if (IS_ERR(edesc)) 1870 return PTR_ERR(edesc); 1871 1872 /* Create and submit job descriptor*/ 1873 init_skcipher_job(req, edesc, false); 1874 desc = edesc->hw_desc; 1875 1876 print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ", 1877 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1878 desc_bytes(edesc->hw_desc), 1); 1879 1880 ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req); 1881 if (!ret) { 1882 ret = -EINPROGRESS; 1883 } else { 1884 skcipher_unmap(jrdev, edesc, req); 1885 kfree(edesc); 1886 } 1887 1888 return ret; 1889 } 1890 1891 static struct caam_skcipher_alg driver_algs[] = { 1892 { 1893 .skcipher = { 1894 .base = { 1895 .cra_name = "cbc(aes)", 1896 .cra_driver_name = "cbc-aes-caam", 1897 .cra_blocksize = AES_BLOCK_SIZE, 1898 }, 1899 .setkey = aes_skcipher_setkey, 1900 .encrypt = skcipher_encrypt, 1901 .decrypt = skcipher_decrypt, 1902 .min_keysize = AES_MIN_KEY_SIZE, 1903 .max_keysize = AES_MAX_KEY_SIZE, 1904 .ivsize = AES_BLOCK_SIZE, 1905 }, 1906 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1907 }, 1908 { 1909 .skcipher = { 1910 .base = { 1911 .cra_name = "cbc(des3_ede)", 1912 .cra_driver_name = "cbc-3des-caam", 1913 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 1914 }, 1915 .setkey = des3_skcipher_setkey, 1916 .encrypt = skcipher_encrypt, 1917 .decrypt = skcipher_decrypt, 1918 .min_keysize = DES3_EDE_KEY_SIZE, 1919 .max_keysize = DES3_EDE_KEY_SIZE, 1920 .ivsize = DES3_EDE_BLOCK_SIZE, 1921 }, 1922 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 1923 }, 1924 { 1925 .skcipher = { 1926 .base = { 1927 .cra_name = "cbc(des)", 1928 .cra_driver_name = "cbc-des-caam", 1929 .cra_blocksize = DES_BLOCK_SIZE, 1930 }, 1931 .setkey = des_skcipher_setkey, 1932 .encrypt = skcipher_encrypt, 1933 .decrypt = skcipher_decrypt, 1934 .min_keysize = DES_KEY_SIZE, 1935 .max_keysize = DES_KEY_SIZE, 1936 .ivsize = DES_BLOCK_SIZE, 1937 }, 1938 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 1939 }, 1940 { 1941 .skcipher = { 1942 .base = { 1943 .cra_name = "ctr(aes)", 1944 .cra_driver_name = "ctr-aes-caam", 1945 .cra_blocksize = 1, 1946 }, 1947 .setkey = ctr_skcipher_setkey, 1948 .encrypt = skcipher_encrypt, 1949 .decrypt = skcipher_decrypt, 1950 .min_keysize = AES_MIN_KEY_SIZE, 1951 .max_keysize = AES_MAX_KEY_SIZE, 1952 .ivsize = AES_BLOCK_SIZE, 1953 .chunksize = AES_BLOCK_SIZE, 1954 }, 1955 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | 1956 OP_ALG_AAI_CTR_MOD128, 1957 }, 1958 { 1959 .skcipher = { 1960 .base = { 1961 .cra_name = "rfc3686(ctr(aes))", 1962 .cra_driver_name = "rfc3686-ctr-aes-caam", 1963 .cra_blocksize = 1, 1964 }, 1965 .setkey = rfc3686_skcipher_setkey, 1966 
.encrypt = skcipher_encrypt, 1967 .decrypt = skcipher_decrypt, 1968 .min_keysize = AES_MIN_KEY_SIZE + 1969 CTR_RFC3686_NONCE_SIZE, 1970 .max_keysize = AES_MAX_KEY_SIZE + 1971 CTR_RFC3686_NONCE_SIZE, 1972 .ivsize = CTR_RFC3686_IV_SIZE, 1973 .chunksize = AES_BLOCK_SIZE, 1974 }, 1975 .caam = { 1976 .class1_alg_type = OP_ALG_ALGSEL_AES | 1977 OP_ALG_AAI_CTR_MOD128, 1978 .rfc3686 = true, 1979 }, 1980 }, 1981 { 1982 .skcipher = { 1983 .base = { 1984 .cra_name = "xts(aes)", 1985 .cra_driver_name = "xts-aes-caam", 1986 .cra_blocksize = AES_BLOCK_SIZE, 1987 }, 1988 .setkey = xts_skcipher_setkey, 1989 .encrypt = skcipher_encrypt, 1990 .decrypt = skcipher_decrypt, 1991 .min_keysize = 2 * AES_MIN_KEY_SIZE, 1992 .max_keysize = 2 * AES_MAX_KEY_SIZE, 1993 .ivsize = AES_BLOCK_SIZE, 1994 }, 1995 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS, 1996 }, 1997 { 1998 .skcipher = { 1999 .base = { 2000 .cra_name = "ecb(des)", 2001 .cra_driver_name = "ecb-des-caam", 2002 .cra_blocksize = DES_BLOCK_SIZE, 2003 }, 2004 .setkey = des_skcipher_setkey, 2005 .encrypt = skcipher_encrypt, 2006 .decrypt = skcipher_decrypt, 2007 .min_keysize = DES_KEY_SIZE, 2008 .max_keysize = DES_KEY_SIZE, 2009 }, 2010 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB, 2011 }, 2012 { 2013 .skcipher = { 2014 .base = { 2015 .cra_name = "ecb(aes)", 2016 .cra_driver_name = "ecb-aes-caam", 2017 .cra_blocksize = AES_BLOCK_SIZE, 2018 }, 2019 .setkey = aes_skcipher_setkey, 2020 .encrypt = skcipher_encrypt, 2021 .decrypt = skcipher_decrypt, 2022 .min_keysize = AES_MIN_KEY_SIZE, 2023 .max_keysize = AES_MAX_KEY_SIZE, 2024 }, 2025 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB, 2026 }, 2027 { 2028 .skcipher = { 2029 .base = { 2030 .cra_name = "ecb(des3_ede)", 2031 .cra_driver_name = "ecb-des3-caam", 2032 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2033 }, 2034 .setkey = des3_skcipher_setkey, 2035 .encrypt = skcipher_encrypt, 2036 .decrypt = skcipher_decrypt, 2037 .min_keysize = DES3_EDE_KEY_SIZE, 2038 .max_keysize = DES3_EDE_KEY_SIZE, 2039 }, 2040 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB, 2041 }, 2042 { 2043 .skcipher = { 2044 .base = { 2045 .cra_name = "ecb(arc4)", 2046 .cra_driver_name = "ecb-arc4-caam", 2047 .cra_blocksize = ARC4_BLOCK_SIZE, 2048 }, 2049 .setkey = arc4_skcipher_setkey, 2050 .encrypt = skcipher_encrypt, 2051 .decrypt = skcipher_decrypt, 2052 .min_keysize = ARC4_MIN_KEY_SIZE, 2053 .max_keysize = ARC4_MAX_KEY_SIZE, 2054 }, 2055 .caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB, 2056 }, 2057 }; 2058 2059 static struct caam_aead_alg driver_aeads[] = { 2060 { 2061 .aead = { 2062 .base = { 2063 .cra_name = "rfc4106(gcm(aes))", 2064 .cra_driver_name = "rfc4106-gcm-aes-caam", 2065 .cra_blocksize = 1, 2066 }, 2067 .setkey = rfc4106_setkey, 2068 .setauthsize = rfc4106_setauthsize, 2069 .encrypt = ipsec_gcm_encrypt, 2070 .decrypt = ipsec_gcm_decrypt, 2071 .ivsize = GCM_RFC4106_IV_SIZE, 2072 .maxauthsize = AES_BLOCK_SIZE, 2073 }, 2074 .caam = { 2075 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2076 .nodkp = true, 2077 }, 2078 }, 2079 { 2080 .aead = { 2081 .base = { 2082 .cra_name = "rfc4543(gcm(aes))", 2083 .cra_driver_name = "rfc4543-gcm-aes-caam", 2084 .cra_blocksize = 1, 2085 }, 2086 .setkey = rfc4543_setkey, 2087 .setauthsize = rfc4543_setauthsize, 2088 .encrypt = ipsec_gcm_encrypt, 2089 .decrypt = ipsec_gcm_decrypt, 2090 .ivsize = GCM_RFC4543_IV_SIZE, 2091 .maxauthsize = AES_BLOCK_SIZE, 2092 }, 2093 .caam = { 2094 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2095 
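			/*
			 * Single-key AEADs such as GCM do not use the CAAM
			 * split-key DKP protocol; see caam_aead_init(), where
			 * nodkp selects uses_dkp = false.
			 */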
.nodkp = true, 2096 }, 2097 }, 2098 /* Galois Counter Mode */ 2099 { 2100 .aead = { 2101 .base = { 2102 .cra_name = "gcm(aes)", 2103 .cra_driver_name = "gcm-aes-caam", 2104 .cra_blocksize = 1, 2105 }, 2106 .setkey = gcm_setkey, 2107 .setauthsize = gcm_setauthsize, 2108 .encrypt = gcm_encrypt, 2109 .decrypt = gcm_decrypt, 2110 .ivsize = GCM_AES_IV_SIZE, 2111 .maxauthsize = AES_BLOCK_SIZE, 2112 }, 2113 .caam = { 2114 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2115 .nodkp = true, 2116 }, 2117 }, 2118 /* single-pass ipsec_esp descriptor */ 2119 { 2120 .aead = { 2121 .base = { 2122 .cra_name = "authenc(hmac(md5)," 2123 "ecb(cipher_null))", 2124 .cra_driver_name = "authenc-hmac-md5-" 2125 "ecb-cipher_null-caam", 2126 .cra_blocksize = NULL_BLOCK_SIZE, 2127 }, 2128 .setkey = aead_setkey, 2129 .setauthsize = aead_setauthsize, 2130 .encrypt = aead_encrypt, 2131 .decrypt = aead_decrypt, 2132 .ivsize = NULL_IV_SIZE, 2133 .maxauthsize = MD5_DIGEST_SIZE, 2134 }, 2135 .caam = { 2136 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2137 OP_ALG_AAI_HMAC_PRECOMP, 2138 }, 2139 }, 2140 { 2141 .aead = { 2142 .base = { 2143 .cra_name = "authenc(hmac(sha1)," 2144 "ecb(cipher_null))", 2145 .cra_driver_name = "authenc-hmac-sha1-" 2146 "ecb-cipher_null-caam", 2147 .cra_blocksize = NULL_BLOCK_SIZE, 2148 }, 2149 .setkey = aead_setkey, 2150 .setauthsize = aead_setauthsize, 2151 .encrypt = aead_encrypt, 2152 .decrypt = aead_decrypt, 2153 .ivsize = NULL_IV_SIZE, 2154 .maxauthsize = SHA1_DIGEST_SIZE, 2155 }, 2156 .caam = { 2157 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2158 OP_ALG_AAI_HMAC_PRECOMP, 2159 }, 2160 }, 2161 { 2162 .aead = { 2163 .base = { 2164 .cra_name = "authenc(hmac(sha224)," 2165 "ecb(cipher_null))", 2166 .cra_driver_name = "authenc-hmac-sha224-" 2167 "ecb-cipher_null-caam", 2168 .cra_blocksize = NULL_BLOCK_SIZE, 2169 }, 2170 .setkey = aead_setkey, 2171 .setauthsize = aead_setauthsize, 2172 .encrypt = aead_encrypt, 2173 .decrypt = aead_decrypt, 2174 .ivsize = NULL_IV_SIZE, 2175 .maxauthsize = SHA224_DIGEST_SIZE, 2176 }, 2177 .caam = { 2178 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2179 OP_ALG_AAI_HMAC_PRECOMP, 2180 }, 2181 }, 2182 { 2183 .aead = { 2184 .base = { 2185 .cra_name = "authenc(hmac(sha256)," 2186 "ecb(cipher_null))", 2187 .cra_driver_name = "authenc-hmac-sha256-" 2188 "ecb-cipher_null-caam", 2189 .cra_blocksize = NULL_BLOCK_SIZE, 2190 }, 2191 .setkey = aead_setkey, 2192 .setauthsize = aead_setauthsize, 2193 .encrypt = aead_encrypt, 2194 .decrypt = aead_decrypt, 2195 .ivsize = NULL_IV_SIZE, 2196 .maxauthsize = SHA256_DIGEST_SIZE, 2197 }, 2198 .caam = { 2199 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2200 OP_ALG_AAI_HMAC_PRECOMP, 2201 }, 2202 }, 2203 { 2204 .aead = { 2205 .base = { 2206 .cra_name = "authenc(hmac(sha384)," 2207 "ecb(cipher_null))", 2208 .cra_driver_name = "authenc-hmac-sha384-" 2209 "ecb-cipher_null-caam", 2210 .cra_blocksize = NULL_BLOCK_SIZE, 2211 }, 2212 .setkey = aead_setkey, 2213 .setauthsize = aead_setauthsize, 2214 .encrypt = aead_encrypt, 2215 .decrypt = aead_decrypt, 2216 .ivsize = NULL_IV_SIZE, 2217 .maxauthsize = SHA384_DIGEST_SIZE, 2218 }, 2219 .caam = { 2220 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2221 OP_ALG_AAI_HMAC_PRECOMP, 2222 }, 2223 }, 2224 { 2225 .aead = { 2226 .base = { 2227 .cra_name = "authenc(hmac(sha512)," 2228 "ecb(cipher_null))", 2229 .cra_driver_name = "authenc-hmac-sha512-" 2230 "ecb-cipher_null-caam", 2231 .cra_blocksize = NULL_BLOCK_SIZE, 2232 }, 2233 .setkey = aead_setkey, 2234 .setauthsize = aead_setauthsize, 2235 .encrypt = aead_encrypt, 2236 
.decrypt = aead_decrypt, 2237 .ivsize = NULL_IV_SIZE, 2238 .maxauthsize = SHA512_DIGEST_SIZE, 2239 }, 2240 .caam = { 2241 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2242 OP_ALG_AAI_HMAC_PRECOMP, 2243 }, 2244 }, 2245 { 2246 .aead = { 2247 .base = { 2248 .cra_name = "authenc(hmac(md5),cbc(aes))", 2249 .cra_driver_name = "authenc-hmac-md5-" 2250 "cbc-aes-caam", 2251 .cra_blocksize = AES_BLOCK_SIZE, 2252 }, 2253 .setkey = aead_setkey, 2254 .setauthsize = aead_setauthsize, 2255 .encrypt = aead_encrypt, 2256 .decrypt = aead_decrypt, 2257 .ivsize = AES_BLOCK_SIZE, 2258 .maxauthsize = MD5_DIGEST_SIZE, 2259 }, 2260 .caam = { 2261 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2262 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2263 OP_ALG_AAI_HMAC_PRECOMP, 2264 }, 2265 }, 2266 { 2267 .aead = { 2268 .base = { 2269 .cra_name = "echainiv(authenc(hmac(md5)," 2270 "cbc(aes)))", 2271 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2272 "cbc-aes-caam", 2273 .cra_blocksize = AES_BLOCK_SIZE, 2274 }, 2275 .setkey = aead_setkey, 2276 .setauthsize = aead_setauthsize, 2277 .encrypt = aead_encrypt, 2278 .decrypt = aead_decrypt, 2279 .ivsize = AES_BLOCK_SIZE, 2280 .maxauthsize = MD5_DIGEST_SIZE, 2281 }, 2282 .caam = { 2283 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2284 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2285 OP_ALG_AAI_HMAC_PRECOMP, 2286 .geniv = true, 2287 }, 2288 }, 2289 { 2290 .aead = { 2291 .base = { 2292 .cra_name = "authenc(hmac(sha1),cbc(aes))", 2293 .cra_driver_name = "authenc-hmac-sha1-" 2294 "cbc-aes-caam", 2295 .cra_blocksize = AES_BLOCK_SIZE, 2296 }, 2297 .setkey = aead_setkey, 2298 .setauthsize = aead_setauthsize, 2299 .encrypt = aead_encrypt, 2300 .decrypt = aead_decrypt, 2301 .ivsize = AES_BLOCK_SIZE, 2302 .maxauthsize = SHA1_DIGEST_SIZE, 2303 }, 2304 .caam = { 2305 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2306 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2307 OP_ALG_AAI_HMAC_PRECOMP, 2308 }, 2309 }, 2310 { 2311 .aead = { 2312 .base = { 2313 .cra_name = "echainiv(authenc(hmac(sha1)," 2314 "cbc(aes)))", 2315 .cra_driver_name = "echainiv-authenc-" 2316 "hmac-sha1-cbc-aes-caam", 2317 .cra_blocksize = AES_BLOCK_SIZE, 2318 }, 2319 .setkey = aead_setkey, 2320 .setauthsize = aead_setauthsize, 2321 .encrypt = aead_encrypt, 2322 .decrypt = aead_decrypt, 2323 .ivsize = AES_BLOCK_SIZE, 2324 .maxauthsize = SHA1_DIGEST_SIZE, 2325 }, 2326 .caam = { 2327 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2328 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2329 OP_ALG_AAI_HMAC_PRECOMP, 2330 .geniv = true, 2331 }, 2332 }, 2333 { 2334 .aead = { 2335 .base = { 2336 .cra_name = "authenc(hmac(sha224),cbc(aes))", 2337 .cra_driver_name = "authenc-hmac-sha224-" 2338 "cbc-aes-caam", 2339 .cra_blocksize = AES_BLOCK_SIZE, 2340 }, 2341 .setkey = aead_setkey, 2342 .setauthsize = aead_setauthsize, 2343 .encrypt = aead_encrypt, 2344 .decrypt = aead_decrypt, 2345 .ivsize = AES_BLOCK_SIZE, 2346 .maxauthsize = SHA224_DIGEST_SIZE, 2347 }, 2348 .caam = { 2349 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2350 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2351 OP_ALG_AAI_HMAC_PRECOMP, 2352 }, 2353 }, 2354 { 2355 .aead = { 2356 .base = { 2357 .cra_name = "echainiv(authenc(hmac(sha224)," 2358 "cbc(aes)))", 2359 .cra_driver_name = "echainiv-authenc-" 2360 "hmac-sha224-cbc-aes-caam", 2361 .cra_blocksize = AES_BLOCK_SIZE, 2362 }, 2363 .setkey = aead_setkey, 2364 .setauthsize = aead_setauthsize, 2365 .encrypt = aead_encrypt, 2366 .decrypt = aead_decrypt, 2367 .ivsize = AES_BLOCK_SIZE, 2368 .maxauthsize = 
SHA224_DIGEST_SIZE, 2369 }, 2370 .caam = { 2371 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2372 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2373 OP_ALG_AAI_HMAC_PRECOMP, 2374 .geniv = true, 2375 }, 2376 }, 2377 { 2378 .aead = { 2379 .base = { 2380 .cra_name = "authenc(hmac(sha256),cbc(aes))", 2381 .cra_driver_name = "authenc-hmac-sha256-" 2382 "cbc-aes-caam", 2383 .cra_blocksize = AES_BLOCK_SIZE, 2384 }, 2385 .setkey = aead_setkey, 2386 .setauthsize = aead_setauthsize, 2387 .encrypt = aead_encrypt, 2388 .decrypt = aead_decrypt, 2389 .ivsize = AES_BLOCK_SIZE, 2390 .maxauthsize = SHA256_DIGEST_SIZE, 2391 }, 2392 .caam = { 2393 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2394 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2395 OP_ALG_AAI_HMAC_PRECOMP, 2396 }, 2397 }, 2398 { 2399 .aead = { 2400 .base = { 2401 .cra_name = "echainiv(authenc(hmac(sha256)," 2402 "cbc(aes)))", 2403 .cra_driver_name = "echainiv-authenc-" 2404 "hmac-sha256-cbc-aes-caam", 2405 .cra_blocksize = AES_BLOCK_SIZE, 2406 }, 2407 .setkey = aead_setkey, 2408 .setauthsize = aead_setauthsize, 2409 .encrypt = aead_encrypt, 2410 .decrypt = aead_decrypt, 2411 .ivsize = AES_BLOCK_SIZE, 2412 .maxauthsize = SHA256_DIGEST_SIZE, 2413 }, 2414 .caam = { 2415 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2416 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2417 OP_ALG_AAI_HMAC_PRECOMP, 2418 .geniv = true, 2419 }, 2420 }, 2421 { 2422 .aead = { 2423 .base = { 2424 .cra_name = "authenc(hmac(sha384),cbc(aes))", 2425 .cra_driver_name = "authenc-hmac-sha384-" 2426 "cbc-aes-caam", 2427 .cra_blocksize = AES_BLOCK_SIZE, 2428 }, 2429 .setkey = aead_setkey, 2430 .setauthsize = aead_setauthsize, 2431 .encrypt = aead_encrypt, 2432 .decrypt = aead_decrypt, 2433 .ivsize = AES_BLOCK_SIZE, 2434 .maxauthsize = SHA384_DIGEST_SIZE, 2435 }, 2436 .caam = { 2437 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2438 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2439 OP_ALG_AAI_HMAC_PRECOMP, 2440 }, 2441 }, 2442 { 2443 .aead = { 2444 .base = { 2445 .cra_name = "echainiv(authenc(hmac(sha384)," 2446 "cbc(aes)))", 2447 .cra_driver_name = "echainiv-authenc-" 2448 "hmac-sha384-cbc-aes-caam", 2449 .cra_blocksize = AES_BLOCK_SIZE, 2450 }, 2451 .setkey = aead_setkey, 2452 .setauthsize = aead_setauthsize, 2453 .encrypt = aead_encrypt, 2454 .decrypt = aead_decrypt, 2455 .ivsize = AES_BLOCK_SIZE, 2456 .maxauthsize = SHA384_DIGEST_SIZE, 2457 }, 2458 .caam = { 2459 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2460 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2461 OP_ALG_AAI_HMAC_PRECOMP, 2462 .geniv = true, 2463 }, 2464 }, 2465 { 2466 .aead = { 2467 .base = { 2468 .cra_name = "authenc(hmac(sha512),cbc(aes))", 2469 .cra_driver_name = "authenc-hmac-sha512-" 2470 "cbc-aes-caam", 2471 .cra_blocksize = AES_BLOCK_SIZE, 2472 }, 2473 .setkey = aead_setkey, 2474 .setauthsize = aead_setauthsize, 2475 .encrypt = aead_encrypt, 2476 .decrypt = aead_decrypt, 2477 .ivsize = AES_BLOCK_SIZE, 2478 .maxauthsize = SHA512_DIGEST_SIZE, 2479 }, 2480 .caam = { 2481 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2482 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2483 OP_ALG_AAI_HMAC_PRECOMP, 2484 }, 2485 }, 2486 { 2487 .aead = { 2488 .base = { 2489 .cra_name = "echainiv(authenc(hmac(sha512)," 2490 "cbc(aes)))", 2491 .cra_driver_name = "echainiv-authenc-" 2492 "hmac-sha512-cbc-aes-caam", 2493 .cra_blocksize = AES_BLOCK_SIZE, 2494 }, 2495 .setkey = aead_setkey, 2496 .setauthsize = aead_setauthsize, 2497 .encrypt = aead_encrypt, 2498 .decrypt = aead_decrypt, 2499 .ivsize = 
AES_BLOCK_SIZE, 2500 .maxauthsize = SHA512_DIGEST_SIZE, 2501 }, 2502 .caam = { 2503 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2504 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2505 OP_ALG_AAI_HMAC_PRECOMP, 2506 .geniv = true, 2507 }, 2508 }, 2509 { 2510 .aead = { 2511 .base = { 2512 .cra_name = "authenc(hmac(md5),cbc(des3_ede))", 2513 .cra_driver_name = "authenc-hmac-md5-" 2514 "cbc-des3_ede-caam", 2515 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2516 }, 2517 .setkey = des3_aead_setkey, 2518 .setauthsize = aead_setauthsize, 2519 .encrypt = aead_encrypt, 2520 .decrypt = aead_decrypt, 2521 .ivsize = DES3_EDE_BLOCK_SIZE, 2522 .maxauthsize = MD5_DIGEST_SIZE, 2523 }, 2524 .caam = { 2525 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2526 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2527 OP_ALG_AAI_HMAC_PRECOMP, 2528 } 2529 }, 2530 { 2531 .aead = { 2532 .base = { 2533 .cra_name = "echainiv(authenc(hmac(md5)," 2534 "cbc(des3_ede)))", 2535 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2536 "cbc-des3_ede-caam", 2537 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2538 }, 2539 .setkey = des3_aead_setkey, 2540 .setauthsize = aead_setauthsize, 2541 .encrypt = aead_encrypt, 2542 .decrypt = aead_decrypt, 2543 .ivsize = DES3_EDE_BLOCK_SIZE, 2544 .maxauthsize = MD5_DIGEST_SIZE, 2545 }, 2546 .caam = { 2547 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2548 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2549 OP_ALG_AAI_HMAC_PRECOMP, 2550 .geniv = true, 2551 } 2552 }, 2553 { 2554 .aead = { 2555 .base = { 2556 .cra_name = "authenc(hmac(sha1)," 2557 "cbc(des3_ede))", 2558 .cra_driver_name = "authenc-hmac-sha1-" 2559 "cbc-des3_ede-caam", 2560 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2561 }, 2562 .setkey = des3_aead_setkey, 2563 .setauthsize = aead_setauthsize, 2564 .encrypt = aead_encrypt, 2565 .decrypt = aead_decrypt, 2566 .ivsize = DES3_EDE_BLOCK_SIZE, 2567 .maxauthsize = SHA1_DIGEST_SIZE, 2568 }, 2569 .caam = { 2570 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2571 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2572 OP_ALG_AAI_HMAC_PRECOMP, 2573 }, 2574 }, 2575 { 2576 .aead = { 2577 .base = { 2578 .cra_name = "echainiv(authenc(hmac(sha1)," 2579 "cbc(des3_ede)))", 2580 .cra_driver_name = "echainiv-authenc-" 2581 "hmac-sha1-" 2582 "cbc-des3_ede-caam", 2583 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2584 }, 2585 .setkey = des3_aead_setkey, 2586 .setauthsize = aead_setauthsize, 2587 .encrypt = aead_encrypt, 2588 .decrypt = aead_decrypt, 2589 .ivsize = DES3_EDE_BLOCK_SIZE, 2590 .maxauthsize = SHA1_DIGEST_SIZE, 2591 }, 2592 .caam = { 2593 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2594 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2595 OP_ALG_AAI_HMAC_PRECOMP, 2596 .geniv = true, 2597 }, 2598 }, 2599 { 2600 .aead = { 2601 .base = { 2602 .cra_name = "authenc(hmac(sha224)," 2603 "cbc(des3_ede))", 2604 .cra_driver_name = "authenc-hmac-sha224-" 2605 "cbc-des3_ede-caam", 2606 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2607 }, 2608 .setkey = des3_aead_setkey, 2609 .setauthsize = aead_setauthsize, 2610 .encrypt = aead_encrypt, 2611 .decrypt = aead_decrypt, 2612 .ivsize = DES3_EDE_BLOCK_SIZE, 2613 .maxauthsize = SHA224_DIGEST_SIZE, 2614 }, 2615 .caam = { 2616 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2617 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2618 OP_ALG_AAI_HMAC_PRECOMP, 2619 }, 2620 }, 2621 { 2622 .aead = { 2623 .base = { 2624 .cra_name = "echainiv(authenc(hmac(sha224)," 2625 "cbc(des3_ede)))", 2626 .cra_driver_name = "echainiv-authenc-" 2627 "hmac-sha224-" 2628 "cbc-des3_ede-caam", 2629 .cra_blocksize 
= DES3_EDE_BLOCK_SIZE, 2630 }, 2631 .setkey = des3_aead_setkey, 2632 .setauthsize = aead_setauthsize, 2633 .encrypt = aead_encrypt, 2634 .decrypt = aead_decrypt, 2635 .ivsize = DES3_EDE_BLOCK_SIZE, 2636 .maxauthsize = SHA224_DIGEST_SIZE, 2637 }, 2638 .caam = { 2639 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2640 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2641 OP_ALG_AAI_HMAC_PRECOMP, 2642 .geniv = true, 2643 }, 2644 }, 2645 { 2646 .aead = { 2647 .base = { 2648 .cra_name = "authenc(hmac(sha256)," 2649 "cbc(des3_ede))", 2650 .cra_driver_name = "authenc-hmac-sha256-" 2651 "cbc-des3_ede-caam", 2652 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2653 }, 2654 .setkey = des3_aead_setkey, 2655 .setauthsize = aead_setauthsize, 2656 .encrypt = aead_encrypt, 2657 .decrypt = aead_decrypt, 2658 .ivsize = DES3_EDE_BLOCK_SIZE, 2659 .maxauthsize = SHA256_DIGEST_SIZE, 2660 }, 2661 .caam = { 2662 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2663 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2664 OP_ALG_AAI_HMAC_PRECOMP, 2665 }, 2666 }, 2667 { 2668 .aead = { 2669 .base = { 2670 .cra_name = "echainiv(authenc(hmac(sha256)," 2671 "cbc(des3_ede)))", 2672 .cra_driver_name = "echainiv-authenc-" 2673 "hmac-sha256-" 2674 "cbc-des3_ede-caam", 2675 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2676 }, 2677 .setkey = des3_aead_setkey, 2678 .setauthsize = aead_setauthsize, 2679 .encrypt = aead_encrypt, 2680 .decrypt = aead_decrypt, 2681 .ivsize = DES3_EDE_BLOCK_SIZE, 2682 .maxauthsize = SHA256_DIGEST_SIZE, 2683 }, 2684 .caam = { 2685 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2686 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2687 OP_ALG_AAI_HMAC_PRECOMP, 2688 .geniv = true, 2689 }, 2690 }, 2691 { 2692 .aead = { 2693 .base = { 2694 .cra_name = "authenc(hmac(sha384)," 2695 "cbc(des3_ede))", 2696 .cra_driver_name = "authenc-hmac-sha384-" 2697 "cbc-des3_ede-caam", 2698 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2699 }, 2700 .setkey = des3_aead_setkey, 2701 .setauthsize = aead_setauthsize, 2702 .encrypt = aead_encrypt, 2703 .decrypt = aead_decrypt, 2704 .ivsize = DES3_EDE_BLOCK_SIZE, 2705 .maxauthsize = SHA384_DIGEST_SIZE, 2706 }, 2707 .caam = { 2708 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2709 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2710 OP_ALG_AAI_HMAC_PRECOMP, 2711 }, 2712 }, 2713 { 2714 .aead = { 2715 .base = { 2716 .cra_name = "echainiv(authenc(hmac(sha384)," 2717 "cbc(des3_ede)))", 2718 .cra_driver_name = "echainiv-authenc-" 2719 "hmac-sha384-" 2720 "cbc-des3_ede-caam", 2721 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2722 }, 2723 .setkey = des3_aead_setkey, 2724 .setauthsize = aead_setauthsize, 2725 .encrypt = aead_encrypt, 2726 .decrypt = aead_decrypt, 2727 .ivsize = DES3_EDE_BLOCK_SIZE, 2728 .maxauthsize = SHA384_DIGEST_SIZE, 2729 }, 2730 .caam = { 2731 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2732 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2733 OP_ALG_AAI_HMAC_PRECOMP, 2734 .geniv = true, 2735 }, 2736 }, 2737 { 2738 .aead = { 2739 .base = { 2740 .cra_name = "authenc(hmac(sha512)," 2741 "cbc(des3_ede))", 2742 .cra_driver_name = "authenc-hmac-sha512-" 2743 "cbc-des3_ede-caam", 2744 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2745 }, 2746 .setkey = des3_aead_setkey, 2747 .setauthsize = aead_setauthsize, 2748 .encrypt = aead_encrypt, 2749 .decrypt = aead_decrypt, 2750 .ivsize = DES3_EDE_BLOCK_SIZE, 2751 .maxauthsize = SHA512_DIGEST_SIZE, 2752 }, 2753 .caam = { 2754 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2755 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2756 
OP_ALG_AAI_HMAC_PRECOMP, 2757 }, 2758 }, 2759 { 2760 .aead = { 2761 .base = { 2762 .cra_name = "echainiv(authenc(hmac(sha512)," 2763 "cbc(des3_ede)))", 2764 .cra_driver_name = "echainiv-authenc-" 2765 "hmac-sha512-" 2766 "cbc-des3_ede-caam", 2767 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2768 }, 2769 .setkey = des3_aead_setkey, 2770 .setauthsize = aead_setauthsize, 2771 .encrypt = aead_encrypt, 2772 .decrypt = aead_decrypt, 2773 .ivsize = DES3_EDE_BLOCK_SIZE, 2774 .maxauthsize = SHA512_DIGEST_SIZE, 2775 }, 2776 .caam = { 2777 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2778 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2779 OP_ALG_AAI_HMAC_PRECOMP, 2780 .geniv = true, 2781 }, 2782 }, 2783 { 2784 .aead = { 2785 .base = { 2786 .cra_name = "authenc(hmac(md5),cbc(des))", 2787 .cra_driver_name = "authenc-hmac-md5-" 2788 "cbc-des-caam", 2789 .cra_blocksize = DES_BLOCK_SIZE, 2790 }, 2791 .setkey = aead_setkey, 2792 .setauthsize = aead_setauthsize, 2793 .encrypt = aead_encrypt, 2794 .decrypt = aead_decrypt, 2795 .ivsize = DES_BLOCK_SIZE, 2796 .maxauthsize = MD5_DIGEST_SIZE, 2797 }, 2798 .caam = { 2799 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2800 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2801 OP_ALG_AAI_HMAC_PRECOMP, 2802 }, 2803 }, 2804 { 2805 .aead = { 2806 .base = { 2807 .cra_name = "echainiv(authenc(hmac(md5)," 2808 "cbc(des)))", 2809 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2810 "cbc-des-caam", 2811 .cra_blocksize = DES_BLOCK_SIZE, 2812 }, 2813 .setkey = aead_setkey, 2814 .setauthsize = aead_setauthsize, 2815 .encrypt = aead_encrypt, 2816 .decrypt = aead_decrypt, 2817 .ivsize = DES_BLOCK_SIZE, 2818 .maxauthsize = MD5_DIGEST_SIZE, 2819 }, 2820 .caam = { 2821 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2822 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2823 OP_ALG_AAI_HMAC_PRECOMP, 2824 .geniv = true, 2825 }, 2826 }, 2827 { 2828 .aead = { 2829 .base = { 2830 .cra_name = "authenc(hmac(sha1),cbc(des))", 2831 .cra_driver_name = "authenc-hmac-sha1-" 2832 "cbc-des-caam", 2833 .cra_blocksize = DES_BLOCK_SIZE, 2834 }, 2835 .setkey = aead_setkey, 2836 .setauthsize = aead_setauthsize, 2837 .encrypt = aead_encrypt, 2838 .decrypt = aead_decrypt, 2839 .ivsize = DES_BLOCK_SIZE, 2840 .maxauthsize = SHA1_DIGEST_SIZE, 2841 }, 2842 .caam = { 2843 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2844 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2845 OP_ALG_AAI_HMAC_PRECOMP, 2846 }, 2847 }, 2848 { 2849 .aead = { 2850 .base = { 2851 .cra_name = "echainiv(authenc(hmac(sha1)," 2852 "cbc(des)))", 2853 .cra_driver_name = "echainiv-authenc-" 2854 "hmac-sha1-cbc-des-caam", 2855 .cra_blocksize = DES_BLOCK_SIZE, 2856 }, 2857 .setkey = aead_setkey, 2858 .setauthsize = aead_setauthsize, 2859 .encrypt = aead_encrypt, 2860 .decrypt = aead_decrypt, 2861 .ivsize = DES_BLOCK_SIZE, 2862 .maxauthsize = SHA1_DIGEST_SIZE, 2863 }, 2864 .caam = { 2865 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2866 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2867 OP_ALG_AAI_HMAC_PRECOMP, 2868 .geniv = true, 2869 }, 2870 }, 2871 { 2872 .aead = { 2873 .base = { 2874 .cra_name = "authenc(hmac(sha224),cbc(des))", 2875 .cra_driver_name = "authenc-hmac-sha224-" 2876 "cbc-des-caam", 2877 .cra_blocksize = DES_BLOCK_SIZE, 2878 }, 2879 .setkey = aead_setkey, 2880 .setauthsize = aead_setauthsize, 2881 .encrypt = aead_encrypt, 2882 .decrypt = aead_decrypt, 2883 .ivsize = DES_BLOCK_SIZE, 2884 .maxauthsize = SHA224_DIGEST_SIZE, 2885 }, 2886 .caam = { 2887 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2888 .class2_alg_type 
= OP_ALG_ALGSEL_SHA224 | 2889 OP_ALG_AAI_HMAC_PRECOMP, 2890 }, 2891 }, 2892 { 2893 .aead = { 2894 .base = { 2895 .cra_name = "echainiv(authenc(hmac(sha224)," 2896 "cbc(des)))", 2897 .cra_driver_name = "echainiv-authenc-" 2898 "hmac-sha224-cbc-des-caam", 2899 .cra_blocksize = DES_BLOCK_SIZE, 2900 }, 2901 .setkey = aead_setkey, 2902 .setauthsize = aead_setauthsize, 2903 .encrypt = aead_encrypt, 2904 .decrypt = aead_decrypt, 2905 .ivsize = DES_BLOCK_SIZE, 2906 .maxauthsize = SHA224_DIGEST_SIZE, 2907 }, 2908 .caam = { 2909 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2910 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2911 OP_ALG_AAI_HMAC_PRECOMP, 2912 .geniv = true, 2913 }, 2914 }, 2915 { 2916 .aead = { 2917 .base = { 2918 .cra_name = "authenc(hmac(sha256),cbc(des))", 2919 .cra_driver_name = "authenc-hmac-sha256-" 2920 "cbc-des-caam", 2921 .cra_blocksize = DES_BLOCK_SIZE, 2922 }, 2923 .setkey = aead_setkey, 2924 .setauthsize = aead_setauthsize, 2925 .encrypt = aead_encrypt, 2926 .decrypt = aead_decrypt, 2927 .ivsize = DES_BLOCK_SIZE, 2928 .maxauthsize = SHA256_DIGEST_SIZE, 2929 }, 2930 .caam = { 2931 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2932 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2933 OP_ALG_AAI_HMAC_PRECOMP, 2934 }, 2935 }, 2936 { 2937 .aead = { 2938 .base = { 2939 .cra_name = "echainiv(authenc(hmac(sha256)," 2940 "cbc(des)))", 2941 .cra_driver_name = "echainiv-authenc-" 2942 "hmac-sha256-cbc-des-caam", 2943 .cra_blocksize = DES_BLOCK_SIZE, 2944 }, 2945 .setkey = aead_setkey, 2946 .setauthsize = aead_setauthsize, 2947 .encrypt = aead_encrypt, 2948 .decrypt = aead_decrypt, 2949 .ivsize = DES_BLOCK_SIZE, 2950 .maxauthsize = SHA256_DIGEST_SIZE, 2951 }, 2952 .caam = { 2953 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2954 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2955 OP_ALG_AAI_HMAC_PRECOMP, 2956 .geniv = true, 2957 }, 2958 }, 2959 { 2960 .aead = { 2961 .base = { 2962 .cra_name = "authenc(hmac(sha384),cbc(des))", 2963 .cra_driver_name = "authenc-hmac-sha384-" 2964 "cbc-des-caam", 2965 .cra_blocksize = DES_BLOCK_SIZE, 2966 }, 2967 .setkey = aead_setkey, 2968 .setauthsize = aead_setauthsize, 2969 .encrypt = aead_encrypt, 2970 .decrypt = aead_decrypt, 2971 .ivsize = DES_BLOCK_SIZE, 2972 .maxauthsize = SHA384_DIGEST_SIZE, 2973 }, 2974 .caam = { 2975 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2976 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2977 OP_ALG_AAI_HMAC_PRECOMP, 2978 }, 2979 }, 2980 { 2981 .aead = { 2982 .base = { 2983 .cra_name = "echainiv(authenc(hmac(sha384)," 2984 "cbc(des)))", 2985 .cra_driver_name = "echainiv-authenc-" 2986 "hmac-sha384-cbc-des-caam", 2987 .cra_blocksize = DES_BLOCK_SIZE, 2988 }, 2989 .setkey = aead_setkey, 2990 .setauthsize = aead_setauthsize, 2991 .encrypt = aead_encrypt, 2992 .decrypt = aead_decrypt, 2993 .ivsize = DES_BLOCK_SIZE, 2994 .maxauthsize = SHA384_DIGEST_SIZE, 2995 }, 2996 .caam = { 2997 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2998 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2999 OP_ALG_AAI_HMAC_PRECOMP, 3000 .geniv = true, 3001 }, 3002 }, 3003 { 3004 .aead = { 3005 .base = { 3006 .cra_name = "authenc(hmac(sha512),cbc(des))", 3007 .cra_driver_name = "authenc-hmac-sha512-" 3008 "cbc-des-caam", 3009 .cra_blocksize = DES_BLOCK_SIZE, 3010 }, 3011 .setkey = aead_setkey, 3012 .setauthsize = aead_setauthsize, 3013 .encrypt = aead_encrypt, 3014 .decrypt = aead_decrypt, 3015 .ivsize = DES_BLOCK_SIZE, 3016 .maxauthsize = SHA512_DIGEST_SIZE, 3017 }, 3018 .caam = { 3019 .class1_alg_type = OP_ALG_ALGSEL_DES | 
OP_ALG_AAI_CBC, 3020 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3021 OP_ALG_AAI_HMAC_PRECOMP, 3022 }, 3023 }, 3024 { 3025 .aead = { 3026 .base = { 3027 .cra_name = "echainiv(authenc(hmac(sha512)," 3028 "cbc(des)))", 3029 .cra_driver_name = "echainiv-authenc-" 3030 "hmac-sha512-cbc-des-caam", 3031 .cra_blocksize = DES_BLOCK_SIZE, 3032 }, 3033 .setkey = aead_setkey, 3034 .setauthsize = aead_setauthsize, 3035 .encrypt = aead_encrypt, 3036 .decrypt = aead_decrypt, 3037 .ivsize = DES_BLOCK_SIZE, 3038 .maxauthsize = SHA512_DIGEST_SIZE, 3039 }, 3040 .caam = { 3041 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 3042 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3043 OP_ALG_AAI_HMAC_PRECOMP, 3044 .geniv = true, 3045 }, 3046 }, 3047 { 3048 .aead = { 3049 .base = { 3050 .cra_name = "authenc(hmac(md5)," 3051 "rfc3686(ctr(aes)))", 3052 .cra_driver_name = "authenc-hmac-md5-" 3053 "rfc3686-ctr-aes-caam", 3054 .cra_blocksize = 1, 3055 }, 3056 .setkey = aead_setkey, 3057 .setauthsize = aead_setauthsize, 3058 .encrypt = aead_encrypt, 3059 .decrypt = aead_decrypt, 3060 .ivsize = CTR_RFC3686_IV_SIZE, 3061 .maxauthsize = MD5_DIGEST_SIZE, 3062 }, 3063 .caam = { 3064 .class1_alg_type = OP_ALG_ALGSEL_AES | 3065 OP_ALG_AAI_CTR_MOD128, 3066 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3067 OP_ALG_AAI_HMAC_PRECOMP, 3068 .rfc3686 = true, 3069 }, 3070 }, 3071 { 3072 .aead = { 3073 .base = { 3074 .cra_name = "seqiv(authenc(" 3075 "hmac(md5),rfc3686(ctr(aes))))", 3076 .cra_driver_name = "seqiv-authenc-hmac-md5-" 3077 "rfc3686-ctr-aes-caam", 3078 .cra_blocksize = 1, 3079 }, 3080 .setkey = aead_setkey, 3081 .setauthsize = aead_setauthsize, 3082 .encrypt = aead_encrypt, 3083 .decrypt = aead_decrypt, 3084 .ivsize = CTR_RFC3686_IV_SIZE, 3085 .maxauthsize = MD5_DIGEST_SIZE, 3086 }, 3087 .caam = { 3088 .class1_alg_type = OP_ALG_ALGSEL_AES | 3089 OP_ALG_AAI_CTR_MOD128, 3090 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3091 OP_ALG_AAI_HMAC_PRECOMP, 3092 .rfc3686 = true, 3093 .geniv = true, 3094 }, 3095 }, 3096 { 3097 .aead = { 3098 .base = { 3099 .cra_name = "authenc(hmac(sha1)," 3100 "rfc3686(ctr(aes)))", 3101 .cra_driver_name = "authenc-hmac-sha1-" 3102 "rfc3686-ctr-aes-caam", 3103 .cra_blocksize = 1, 3104 }, 3105 .setkey = aead_setkey, 3106 .setauthsize = aead_setauthsize, 3107 .encrypt = aead_encrypt, 3108 .decrypt = aead_decrypt, 3109 .ivsize = CTR_RFC3686_IV_SIZE, 3110 .maxauthsize = SHA1_DIGEST_SIZE, 3111 }, 3112 .caam = { 3113 .class1_alg_type = OP_ALG_ALGSEL_AES | 3114 OP_ALG_AAI_CTR_MOD128, 3115 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3116 OP_ALG_AAI_HMAC_PRECOMP, 3117 .rfc3686 = true, 3118 }, 3119 }, 3120 { 3121 .aead = { 3122 .base = { 3123 .cra_name = "seqiv(authenc(" 3124 "hmac(sha1),rfc3686(ctr(aes))))", 3125 .cra_driver_name = "seqiv-authenc-hmac-sha1-" 3126 "rfc3686-ctr-aes-caam", 3127 .cra_blocksize = 1, 3128 }, 3129 .setkey = aead_setkey, 3130 .setauthsize = aead_setauthsize, 3131 .encrypt = aead_encrypt, 3132 .decrypt = aead_decrypt, 3133 .ivsize = CTR_RFC3686_IV_SIZE, 3134 .maxauthsize = SHA1_DIGEST_SIZE, 3135 }, 3136 .caam = { 3137 .class1_alg_type = OP_ALG_ALGSEL_AES | 3138 OP_ALG_AAI_CTR_MOD128, 3139 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3140 OP_ALG_AAI_HMAC_PRECOMP, 3141 .rfc3686 = true, 3142 .geniv = true, 3143 }, 3144 }, 3145 { 3146 .aead = { 3147 .base = { 3148 .cra_name = "authenc(hmac(sha224)," 3149 "rfc3686(ctr(aes)))", 3150 .cra_driver_name = "authenc-hmac-sha224-" 3151 "rfc3686-ctr-aes-caam", 3152 .cra_blocksize = 1, 3153 }, 3154 .setkey = aead_setkey, 3155 .setauthsize = aead_setauthsize, 3156 .encrypt 
= aead_encrypt, 3157 .decrypt = aead_decrypt, 3158 .ivsize = CTR_RFC3686_IV_SIZE, 3159 .maxauthsize = SHA224_DIGEST_SIZE, 3160 }, 3161 .caam = { 3162 .class1_alg_type = OP_ALG_ALGSEL_AES | 3163 OP_ALG_AAI_CTR_MOD128, 3164 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3165 OP_ALG_AAI_HMAC_PRECOMP, 3166 .rfc3686 = true, 3167 }, 3168 }, 3169 { 3170 .aead = { 3171 .base = { 3172 .cra_name = "seqiv(authenc(" 3173 "hmac(sha224),rfc3686(ctr(aes))))", 3174 .cra_driver_name = "seqiv-authenc-hmac-sha224-" 3175 "rfc3686-ctr-aes-caam", 3176 .cra_blocksize = 1, 3177 }, 3178 .setkey = aead_setkey, 3179 .setauthsize = aead_setauthsize, 3180 .encrypt = aead_encrypt, 3181 .decrypt = aead_decrypt, 3182 .ivsize = CTR_RFC3686_IV_SIZE, 3183 .maxauthsize = SHA224_DIGEST_SIZE, 3184 }, 3185 .caam = { 3186 .class1_alg_type = OP_ALG_ALGSEL_AES | 3187 OP_ALG_AAI_CTR_MOD128, 3188 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3189 OP_ALG_AAI_HMAC_PRECOMP, 3190 .rfc3686 = true, 3191 .geniv = true, 3192 }, 3193 }, 3194 { 3195 .aead = { 3196 .base = { 3197 .cra_name = "authenc(hmac(sha256)," 3198 "rfc3686(ctr(aes)))", 3199 .cra_driver_name = "authenc-hmac-sha256-" 3200 "rfc3686-ctr-aes-caam", 3201 .cra_blocksize = 1, 3202 }, 3203 .setkey = aead_setkey, 3204 .setauthsize = aead_setauthsize, 3205 .encrypt = aead_encrypt, 3206 .decrypt = aead_decrypt, 3207 .ivsize = CTR_RFC3686_IV_SIZE, 3208 .maxauthsize = SHA256_DIGEST_SIZE, 3209 }, 3210 .caam = { 3211 .class1_alg_type = OP_ALG_ALGSEL_AES | 3212 OP_ALG_AAI_CTR_MOD128, 3213 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3214 OP_ALG_AAI_HMAC_PRECOMP, 3215 .rfc3686 = true, 3216 }, 3217 }, 3218 { 3219 .aead = { 3220 .base = { 3221 .cra_name = "seqiv(authenc(hmac(sha256)," 3222 "rfc3686(ctr(aes))))", 3223 .cra_driver_name = "seqiv-authenc-hmac-sha256-" 3224 "rfc3686-ctr-aes-caam", 3225 .cra_blocksize = 1, 3226 }, 3227 .setkey = aead_setkey, 3228 .setauthsize = aead_setauthsize, 3229 .encrypt = aead_encrypt, 3230 .decrypt = aead_decrypt, 3231 .ivsize = CTR_RFC3686_IV_SIZE, 3232 .maxauthsize = SHA256_DIGEST_SIZE, 3233 }, 3234 .caam = { 3235 .class1_alg_type = OP_ALG_ALGSEL_AES | 3236 OP_ALG_AAI_CTR_MOD128, 3237 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3238 OP_ALG_AAI_HMAC_PRECOMP, 3239 .rfc3686 = true, 3240 .geniv = true, 3241 }, 3242 }, 3243 { 3244 .aead = { 3245 .base = { 3246 .cra_name = "authenc(hmac(sha384)," 3247 "rfc3686(ctr(aes)))", 3248 .cra_driver_name = "authenc-hmac-sha384-" 3249 "rfc3686-ctr-aes-caam", 3250 .cra_blocksize = 1, 3251 }, 3252 .setkey = aead_setkey, 3253 .setauthsize = aead_setauthsize, 3254 .encrypt = aead_encrypt, 3255 .decrypt = aead_decrypt, 3256 .ivsize = CTR_RFC3686_IV_SIZE, 3257 .maxauthsize = SHA384_DIGEST_SIZE, 3258 }, 3259 .caam = { 3260 .class1_alg_type = OP_ALG_ALGSEL_AES | 3261 OP_ALG_AAI_CTR_MOD128, 3262 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3263 OP_ALG_AAI_HMAC_PRECOMP, 3264 .rfc3686 = true, 3265 }, 3266 }, 3267 { 3268 .aead = { 3269 .base = { 3270 .cra_name = "seqiv(authenc(hmac(sha384)," 3271 "rfc3686(ctr(aes))))", 3272 .cra_driver_name = "seqiv-authenc-hmac-sha384-" 3273 "rfc3686-ctr-aes-caam", 3274 .cra_blocksize = 1, 3275 }, 3276 .setkey = aead_setkey, 3277 .setauthsize = aead_setauthsize, 3278 .encrypt = aead_encrypt, 3279 .decrypt = aead_decrypt, 3280 .ivsize = CTR_RFC3686_IV_SIZE, 3281 .maxauthsize = SHA384_DIGEST_SIZE, 3282 }, 3283 .caam = { 3284 .class1_alg_type = OP_ALG_ALGSEL_AES | 3285 OP_ALG_AAI_CTR_MOD128, 3286 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3287 OP_ALG_AAI_HMAC_PRECOMP, 3288 .rfc3686 = true, 3289 .geniv = true, 3290 }, 
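		/*
		 * geniv above: the encrypt shared descriptor generates the
		 * IV in-device (seqiv/echainiv templates) rather than taking
		 * one supplied by the caller.
		 */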
3291 }, 3292 { 3293 .aead = { 3294 .base = { 3295 .cra_name = "authenc(hmac(sha512)," 3296 "rfc3686(ctr(aes)))", 3297 .cra_driver_name = "authenc-hmac-sha512-" 3298 "rfc3686-ctr-aes-caam", 3299 .cra_blocksize = 1, 3300 }, 3301 .setkey = aead_setkey, 3302 .setauthsize = aead_setauthsize, 3303 .encrypt = aead_encrypt, 3304 .decrypt = aead_decrypt, 3305 .ivsize = CTR_RFC3686_IV_SIZE, 3306 .maxauthsize = SHA512_DIGEST_SIZE, 3307 }, 3308 .caam = { 3309 .class1_alg_type = OP_ALG_ALGSEL_AES | 3310 OP_ALG_AAI_CTR_MOD128, 3311 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3312 OP_ALG_AAI_HMAC_PRECOMP, 3313 .rfc3686 = true, 3314 }, 3315 }, 3316 { 3317 .aead = { 3318 .base = { 3319 .cra_name = "seqiv(authenc(hmac(sha512)," 3320 "rfc3686(ctr(aes))))", 3321 .cra_driver_name = "seqiv-authenc-hmac-sha512-" 3322 "rfc3686-ctr-aes-caam", 3323 .cra_blocksize = 1, 3324 }, 3325 .setkey = aead_setkey, 3326 .setauthsize = aead_setauthsize, 3327 .encrypt = aead_encrypt, 3328 .decrypt = aead_decrypt, 3329 .ivsize = CTR_RFC3686_IV_SIZE, 3330 .maxauthsize = SHA512_DIGEST_SIZE, 3331 }, 3332 .caam = { 3333 .class1_alg_type = OP_ALG_ALGSEL_AES | 3334 OP_ALG_AAI_CTR_MOD128, 3335 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3336 OP_ALG_AAI_HMAC_PRECOMP, 3337 .rfc3686 = true, 3338 .geniv = true, 3339 }, 3340 }, 3341 { 3342 .aead = { 3343 .base = { 3344 .cra_name = "rfc7539(chacha20,poly1305)", 3345 .cra_driver_name = "rfc7539-chacha20-poly1305-" 3346 "caam", 3347 .cra_blocksize = 1, 3348 }, 3349 .setkey = chachapoly_setkey, 3350 .setauthsize = chachapoly_setauthsize, 3351 .encrypt = chachapoly_encrypt, 3352 .decrypt = chachapoly_decrypt, 3353 .ivsize = CHACHAPOLY_IV_SIZE, 3354 .maxauthsize = POLY1305_DIGEST_SIZE, 3355 }, 3356 .caam = { 3357 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 | 3358 OP_ALG_AAI_AEAD, 3359 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 | 3360 OP_ALG_AAI_AEAD, 3361 .nodkp = true, 3362 }, 3363 }, 3364 { 3365 .aead = { 3366 .base = { 3367 .cra_name = "rfc7539esp(chacha20,poly1305)", 3368 .cra_driver_name = "rfc7539esp-chacha20-" 3369 "poly1305-caam", 3370 .cra_blocksize = 1, 3371 }, 3372 .setkey = chachapoly_setkey, 3373 .setauthsize = chachapoly_setauthsize, 3374 .encrypt = chachapoly_encrypt, 3375 .decrypt = chachapoly_decrypt, 3376 .ivsize = 8, 3377 .maxauthsize = POLY1305_DIGEST_SIZE, 3378 }, 3379 .caam = { 3380 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 | 3381 OP_ALG_AAI_AEAD, 3382 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 | 3383 OP_ALG_AAI_AEAD, 3384 .nodkp = true, 3385 }, 3386 }, 3387 }; 3388 3389 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam, 3390 bool uses_dkp) 3391 { 3392 dma_addr_t dma_addr; 3393 struct caam_drv_private *priv; 3394 3395 ctx->jrdev = caam_jr_alloc(); 3396 if (IS_ERR(ctx->jrdev)) { 3397 pr_err("Job Ring Device allocation for transform failed\n"); 3398 return PTR_ERR(ctx->jrdev); 3399 } 3400 3401 priv = dev_get_drvdata(ctx->jrdev->parent); 3402 if (priv->era >= 6 && uses_dkp) 3403 ctx->dir = DMA_BIDIRECTIONAL; 3404 else 3405 ctx->dir = DMA_TO_DEVICE; 3406 3407 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, 3408 offsetof(struct caam_ctx, 3409 sh_desc_enc_dma), 3410 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3411 if (dma_mapping_error(ctx->jrdev, dma_addr)) { 3412 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); 3413 caam_jr_free(ctx->jrdev); 3414 return -ENOMEM; 3415 } 3416 3417 ctx->sh_desc_enc_dma = dma_addr; 3418 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx, 3419 sh_desc_dec); 3420 ctx->key_dma = dma_addr + offsetof(struct 
caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}

static int caam_cra_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher);

	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
				false);
}

static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
}

static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	caam_exit_common(crypto_skcipher_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}

void caam_algapi_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;

		if (t_alg->registered)
			crypto_unregister_skcipher(&t_alg->skcipher);
	}
}

static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
{
	struct skcipher_alg *alg = &t_alg->skcipher;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_cra_init;
	alg->exit = caam_cra_exit;
}

static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}

int caam_algapi_init(struct device *ctrldev)
{
	struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
	u32 arc4_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false, gcm_support;

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
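	 * On Era < 10 parts the CHA capabilities come from the global
	 * perfmon id/instantiation registers; Era >= 10 parts expose
	 * per-CHA version registers (vreg) instead.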
	 */
	if (priv->era < 10) {
		u32 cha_vid, cha_inst, aes_rn;

		cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;

		cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
			   CHA_ID_LS_DES_SHIFT;
		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
		arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >>
			    CHA_ID_LS_ARC4_SHIFT;
		ccha_inst = 0;
		ptha_inst = 0;

		aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
			 CHA_ID_LS_AES_MASK;
		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
	} else {
		u32 aesa, mdha;

		aesa = rd_reg32(&priv->ctrl->vreg.aesa);
		mdha = rd_reg32(&priv->ctrl->vreg.mdha);

		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;

		des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
		aes_inst = aesa & CHA_VER_NUM_MASK;
		md_inst = mdha & CHA_VER_NUM_MASK;
		ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
		ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
		arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK;

		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
	}

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;
		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip ARC4 algorithms if not supported by device */
		if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4)
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
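		 * The LP AES CHA does not implement XTS, so those
		 * transforms are skipped.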
		 */
		if (aes_vid == CHA_VER_VID_AES_LP &&
		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
		     OP_ALG_AAI_XTS)
			continue;

		caam_skcipher_alg_init(t_alg);

		err = crypto_register_skcipher(&t_alg->skcipher);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->skcipher.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/* Skip CHACHA20 algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
			continue;

		/* Skip POLY1305 algorithms if not supported by device */
		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
			continue;

		/* Skip GCM algorithms if not supported by device */
		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
			continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (is_mdha(c2_alg_sel) &&
		    (!md_inst || t_alg->aead.maxauthsize > md_limit))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
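
/*
 * Usage note (illustrative, not part of the driver): consumers never call
 * the functions above directly; they go through the generic crypto API,
 * which dispatches to the "-caam" implementations by priority
 * (CAAM_CRA_PRIORITY). A minimal kernel-side sketch for the cbc(aes)
 * transform, assuming valid key/iv buffers and a block-aligned buf/buflen:
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, buflen);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *
 *	skcipher_encrypt() above returns -EINPROGRESS; crypto_wait_req()
 *	turns the asynchronous completion back into a synchronous result:
 *
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */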