1 // SPDX-License-Identifier: GPL-2.0+ 2 /* 3 * caam - Freescale FSL CAAM support for crypto API 4 * 5 * Copyright 2008-2011 Freescale Semiconductor, Inc. 6 * Copyright 2016-2019 NXP 7 * 8 * Based on talitos crypto API driver. 9 * 10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008): 11 * 12 * --------------- --------------- 13 * | JobDesc #1 |-------------------->| ShareDesc | 14 * | *(packet 1) | | (PDB) | 15 * --------------- |------------->| (hashKey) | 16 * . | | (cipherKey) | 17 * . | |-------->| (operation) | 18 * --------------- | | --------------- 19 * | JobDesc #2 |------| | 20 * | *(packet 2) | | 21 * --------------- | 22 * . | 23 * . | 24 * --------------- | 25 * | JobDesc #3 |------------ 26 * | *(packet 3) | 27 * --------------- 28 * 29 * The SharedDesc never changes for a connection unless rekeyed, but 30 * each packet will likely be in a different place. So all we need 31 * to know to process the packet is where the input is, where the 32 * output goes, and what context we want to process with. Context is 33 * in the SharedDesc, packet references in the JobDesc. 34 * 35 * So, a job desc looks like: 36 * 37 * --------------------- 38 * | Header | 39 * | ShareDesc Pointer | 40 * | SEQ_OUT_PTR | 41 * | (output buffer) | 42 * | (output length) | 43 * | SEQ_IN_PTR | 44 * | (input buffer) | 45 * | (input length) | 46 * --------------------- 47 */ 48 49 #include "compat.h" 50 51 #include "regs.h" 52 #include "intern.h" 53 #include "desc_constr.h" 54 #include "jr.h" 55 #include "error.h" 56 #include "sg_sw_sec4.h" 57 #include "key_gen.h" 58 #include "caamalg_desc.h" 59 60 /* 61 * crypto alg 62 */ 63 #define CAAM_CRA_PRIORITY 3000 64 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */ 65 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \ 66 CTR_RFC3686_NONCE_SIZE + \ 67 SHA512_DIGEST_SIZE * 2) 68 69 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2) 70 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 71 CAAM_CMD_SZ * 4) 72 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 73 CAAM_CMD_SZ * 5) 74 75 #define CHACHAPOLY_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6) 76 77 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN) 78 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ) 79 80 struct caam_alg_entry { 81 int class1_alg_type; 82 int class2_alg_type; 83 bool rfc3686; 84 bool geniv; 85 bool nodkp; 86 }; 87 88 struct caam_aead_alg { 89 struct aead_alg aead; 90 struct caam_alg_entry caam; 91 bool registered; 92 }; 93 94 struct caam_skcipher_alg { 95 struct skcipher_alg skcipher; 96 struct caam_alg_entry caam; 97 bool registered; 98 }; 99 100 /* 101 * per-session context 102 */ 103 struct caam_ctx { 104 u32 sh_desc_enc[DESC_MAX_USED_LEN]; 105 u32 sh_desc_dec[DESC_MAX_USED_LEN]; 106 u8 key[CAAM_MAX_KEY_SIZE]; 107 dma_addr_t sh_desc_enc_dma; 108 dma_addr_t sh_desc_dec_dma; 109 dma_addr_t key_dma; 110 enum dma_data_direction dir; 111 struct device *jrdev; 112 struct alginfo adata; 113 struct alginfo cdata; 114 unsigned int authsize; 115 }; 116 117 static int aead_null_set_sh_desc(struct crypto_aead *aead) 118 { 119 struct caam_ctx *ctx = crypto_aead_ctx(aead); 120 struct device *jrdev = ctx->jrdev; 121 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 122 u32 *desc; 123 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN - 124 ctx->adata.keylen_pad; 125 126 /* 127 * Job Descriptor and Shared Descriptors 128 * must all fit into the 64-word Descriptor h/w Buffer 129 */ 130 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) { 131 ctx->adata.key_inline = true; 132 ctx->adata.key_virt = ctx->key; 133 } else { 134 ctx->adata.key_inline = false; 135 ctx->adata.key_dma = ctx->key_dma; 136 } 137 138 /* aead_encrypt shared descriptor */ 139 desc = ctx->sh_desc_enc; 140 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize, 141 ctrlpriv->era); 142 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 143 desc_bytes(desc), ctx->dir); 144 145 /* 146 * Job Descriptor and Shared Descriptors 147 * must all fit into the 64-word Descriptor h/w Buffer 148 */ 149 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) { 150 ctx->adata.key_inline = true; 151 ctx->adata.key_virt = ctx->key; 152 } else { 153 ctx->adata.key_inline = false; 154 ctx->adata.key_dma = ctx->key_dma; 155 } 156 157 /* aead_decrypt shared descriptor */ 158 desc = ctx->sh_desc_dec; 159 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize, 160 ctrlpriv->era); 161 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 162 desc_bytes(desc), ctx->dir); 163 164 return 0; 165 } 166 167 static int aead_set_sh_desc(struct crypto_aead *aead) 168 { 169 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 170 struct caam_aead_alg, aead); 171 unsigned int ivsize = crypto_aead_ivsize(aead); 172 struct caam_ctx *ctx = crypto_aead_ctx(aead); 173 struct device *jrdev = ctx->jrdev; 174 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 175 u32 ctx1_iv_off = 0; 176 u32 *desc, *nonce = NULL; 177 u32 inl_mask; 178 unsigned int data_len[2]; 179 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 180 OP_ALG_AAI_CTR_MOD128); 181 const bool is_rfc3686 = alg->caam.rfc3686; 182 183 if (!ctx->authsize) 184 return 0; 185 186 /* NULL encryption / decryption */ 187 if (!ctx->cdata.keylen) 188 return aead_null_set_sh_desc(aead); 189 190 /* 191 * AES-CTR needs to load IV in CONTEXT1 reg 192 * at an offset of 128bits (16bytes) 193 * CONTEXT1[255:128] = IV 194 */ 195 if (ctr_mode) 196 ctx1_iv_off = 16; 197 198 /* 199 * RFC3686 specific: 200 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 201 */ 202 if (is_rfc3686) { 203 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 204 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad + 205 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE); 206 } 207 208 data_len[0] = ctx->adata.keylen_pad; 209 data_len[1] = ctx->cdata.keylen; 210 211 if (alg->caam.geniv) 212 goto skip_enc; 213 214 /* 215 * Job Descriptor and Shared Descriptors 216 * must all fit into the 64-word Descriptor h/w Buffer 217 */ 218 if (desc_inline_query(DESC_AEAD_ENC_LEN + 219 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 220 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 221 ARRAY_SIZE(data_len)) < 0) 222 return -EINVAL; 223 224 if (inl_mask & 1) 225 ctx->adata.key_virt = ctx->key; 226 else 227 ctx->adata.key_dma = ctx->key_dma; 228 229 if (inl_mask & 2) 230 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 231 else 232 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 233 234 ctx->adata.key_inline = !!(inl_mask & 1); 235 ctx->cdata.key_inline = !!(inl_mask & 2); 236 237 /* aead_encrypt shared descriptor */ 238 desc = ctx->sh_desc_enc; 239 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize, 240 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off, 241 false, ctrlpriv->era); 242 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 243 desc_bytes(desc), ctx->dir); 244 245 skip_enc: 246 /* 247 * Job Descriptor and Shared Descriptors 248 * must all fit into the 64-word Descriptor h/w Buffer 249 */ 250 if (desc_inline_query(DESC_AEAD_DEC_LEN + 251 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 252 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 253 ARRAY_SIZE(data_len)) < 0) 254 return -EINVAL; 255 256 if (inl_mask & 1) 257 ctx->adata.key_virt = ctx->key; 258 else 259 ctx->adata.key_dma = ctx->key_dma; 260 261 if (inl_mask & 2) 262 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 263 else 264 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 265 266 ctx->adata.key_inline = !!(inl_mask & 1); 267 ctx->cdata.key_inline = !!(inl_mask & 2); 268 269 /* aead_decrypt shared descriptor */ 270 desc = ctx->sh_desc_dec; 271 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize, 272 ctx->authsize, alg->caam.geniv, is_rfc3686, 273 nonce, ctx1_iv_off, false, ctrlpriv->era); 274 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 275 desc_bytes(desc), ctx->dir); 276 277 if (!alg->caam.geniv) 278 goto skip_givenc; 279 280 /* 281 * Job Descriptor and Shared Descriptors 282 * must all fit into the 64-word Descriptor h/w Buffer 283 */ 284 if (desc_inline_query(DESC_AEAD_GIVENC_LEN + 285 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 286 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 287 ARRAY_SIZE(data_len)) < 0) 288 return -EINVAL; 289 290 if (inl_mask & 1) 291 ctx->adata.key_virt = ctx->key; 292 else 293 ctx->adata.key_dma = ctx->key_dma; 294 295 if (inl_mask & 2) 296 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 297 else 298 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 299 300 ctx->adata.key_inline = !!(inl_mask & 1); 301 ctx->cdata.key_inline = !!(inl_mask & 2); 302 303 /* aead_givencrypt shared descriptor */ 304 desc = ctx->sh_desc_enc; 305 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize, 306 ctx->authsize, is_rfc3686, nonce, 307 ctx1_iv_off, false, ctrlpriv->era); 308 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 309 desc_bytes(desc), ctx->dir); 310 311 skip_givenc: 312 return 0; 313 } 314 315 static int aead_setauthsize(struct crypto_aead *authenc, 316 unsigned int authsize) 317 { 318 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 319 320 ctx->authsize = authsize; 321 aead_set_sh_desc(authenc); 322 323 return 0; 324 } 325 326 static int gcm_set_sh_desc(struct crypto_aead *aead) 327 { 328 struct caam_ctx *ctx = crypto_aead_ctx(aead); 329 struct device *jrdev = ctx->jrdev; 330 unsigned int ivsize = crypto_aead_ivsize(aead); 331 u32 *desc; 332 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 333 ctx->cdata.keylen; 334 335 if (!ctx->cdata.keylen || !ctx->authsize) 336 return 0; 337 338 /* 339 * AES GCM encrypt shared descriptor 340 * Job Descriptor and Shared Descriptor 341 * must fit into the 64-word Descriptor h/w Buffer 342 */ 343 if (rem_bytes >= DESC_GCM_ENC_LEN) { 344 ctx->cdata.key_inline = true; 345 ctx->cdata.key_virt = ctx->key; 346 } else { 347 ctx->cdata.key_inline = false; 348 ctx->cdata.key_dma = ctx->key_dma; 349 } 350 351 desc = ctx->sh_desc_enc; 352 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false); 353 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 354 desc_bytes(desc), ctx->dir); 355 356 /* 357 * Job Descriptor and Shared Descriptors 358 * must all fit into the 64-word Descriptor h/w Buffer 359 */ 360 if (rem_bytes >= DESC_GCM_DEC_LEN) { 361 ctx->cdata.key_inline = true; 362 ctx->cdata.key_virt = ctx->key; 363 } else { 364 ctx->cdata.key_inline = false; 365 ctx->cdata.key_dma = ctx->key_dma; 366 } 367 368 desc = ctx->sh_desc_dec; 369 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false); 370 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 371 desc_bytes(desc), ctx->dir); 372 373 return 0; 374 } 375 376 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize) 377 { 378 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 379 380 ctx->authsize = authsize; 381 gcm_set_sh_desc(authenc); 382 383 return 0; 384 } 385 386 static int rfc4106_set_sh_desc(struct crypto_aead *aead) 387 { 388 struct caam_ctx *ctx = crypto_aead_ctx(aead); 389 struct device *jrdev = ctx->jrdev; 390 unsigned int ivsize = crypto_aead_ivsize(aead); 391 u32 *desc; 392 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 393 ctx->cdata.keylen; 394 395 if (!ctx->cdata.keylen || !ctx->authsize) 396 return 0; 397 398 /* 399 * RFC4106 encrypt shared descriptor 400 * Job Descriptor and Shared Descriptor 401 * must fit into the 64-word Descriptor h/w Buffer 402 */ 403 if (rem_bytes >= DESC_RFC4106_ENC_LEN) { 404 ctx->cdata.key_inline = true; 405 ctx->cdata.key_virt = ctx->key; 406 } else { 407 ctx->cdata.key_inline = false; 408 ctx->cdata.key_dma = ctx->key_dma; 409 } 410 411 desc = ctx->sh_desc_enc; 412 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize, 413 false); 414 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 415 desc_bytes(desc), ctx->dir); 416 417 /* 418 * Job Descriptor and Shared Descriptors 419 * must all fit into the 64-word Descriptor h/w Buffer 420 */ 421 if (rem_bytes >= DESC_RFC4106_DEC_LEN) { 422 ctx->cdata.key_inline = true; 423 ctx->cdata.key_virt = ctx->key; 424 } else { 425 ctx->cdata.key_inline = false; 426 ctx->cdata.key_dma = ctx->key_dma; 427 } 428 429 desc = ctx->sh_desc_dec; 430 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 431 false); 432 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 433 desc_bytes(desc), ctx->dir); 434 435 return 0; 436 } 437 438 static int rfc4106_setauthsize(struct crypto_aead *authenc, 439 unsigned int authsize) 440 { 441 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 442 443 ctx->authsize = authsize; 444 rfc4106_set_sh_desc(authenc); 445 446 return 0; 447 } 448 449 static int rfc4543_set_sh_desc(struct crypto_aead *aead) 450 { 451 struct caam_ctx *ctx = crypto_aead_ctx(aead); 452 struct device *jrdev = ctx->jrdev; 453 unsigned int ivsize = crypto_aead_ivsize(aead); 454 u32 *desc; 455 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 456 ctx->cdata.keylen; 457 458 if (!ctx->cdata.keylen || !ctx->authsize) 459 return 0; 460 461 /* 462 * RFC4543 encrypt shared descriptor 463 * Job Descriptor and Shared Descriptor 464 * must fit into the 64-word Descriptor h/w Buffer 465 */ 466 if (rem_bytes >= DESC_RFC4543_ENC_LEN) { 467 ctx->cdata.key_inline = true; 468 ctx->cdata.key_virt = ctx->key; 469 } else { 470 ctx->cdata.key_inline = false; 471 ctx->cdata.key_dma = ctx->key_dma; 472 } 473 474 desc = ctx->sh_desc_enc; 475 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize, 476 false); 477 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 478 desc_bytes(desc), ctx->dir); 479 480 /* 481 * Job Descriptor and Shared Descriptors 482 * must all fit into the 64-word Descriptor h/w Buffer 483 */ 484 if (rem_bytes >= DESC_RFC4543_DEC_LEN) { 485 ctx->cdata.key_inline = true; 486 ctx->cdata.key_virt = ctx->key; 487 } else { 488 ctx->cdata.key_inline = false; 489 ctx->cdata.key_dma = ctx->key_dma; 490 } 491 492 desc = ctx->sh_desc_dec; 493 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 494 false); 495 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 496 desc_bytes(desc), ctx->dir); 497 498 return 0; 499 } 500 501 static int rfc4543_setauthsize(struct crypto_aead *authenc, 502 unsigned int authsize) 503 { 504 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 505 506 ctx->authsize = authsize; 507 rfc4543_set_sh_desc(authenc); 508 509 return 0; 510 } 511 512 static int chachapoly_set_sh_desc(struct crypto_aead *aead) 513 { 514 struct caam_ctx *ctx = crypto_aead_ctx(aead); 515 struct device *jrdev = ctx->jrdev; 516 unsigned int ivsize = crypto_aead_ivsize(aead); 517 u32 *desc; 518 519 if (!ctx->cdata.keylen || !ctx->authsize) 520 return 0; 521 522 desc = ctx->sh_desc_enc; 523 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, 524 ctx->authsize, true, false); 525 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 526 desc_bytes(desc), ctx->dir); 527 528 desc = ctx->sh_desc_dec; 529 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, 530 ctx->authsize, false, false); 531 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 532 desc_bytes(desc), ctx->dir); 533 534 return 0; 535 } 536 537 static int chachapoly_setauthsize(struct crypto_aead *aead, 538 unsigned int authsize) 539 { 540 struct caam_ctx *ctx = crypto_aead_ctx(aead); 541 542 if (authsize != POLY1305_DIGEST_SIZE) 543 return -EINVAL; 544 545 ctx->authsize = authsize; 546 return chachapoly_set_sh_desc(aead); 547 } 548 549 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key, 550 unsigned int keylen) 551 { 552 struct caam_ctx *ctx = crypto_aead_ctx(aead); 553 unsigned int ivsize = crypto_aead_ivsize(aead); 554 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize; 555 556 if (keylen != CHACHA_KEY_SIZE + saltlen) { 557 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 558 return -EINVAL; 559 } 560 561 ctx->cdata.key_virt = key; 562 ctx->cdata.keylen = keylen - saltlen; 563 564 return chachapoly_set_sh_desc(aead); 565 } 566 567 static int aead_setkey(struct crypto_aead *aead, 568 const u8 *key, unsigned int keylen) 569 { 570 struct caam_ctx *ctx = crypto_aead_ctx(aead); 571 struct device *jrdev = ctx->jrdev; 572 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 573 struct crypto_authenc_keys keys; 574 int ret = 0; 575 576 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) 577 goto badkey; 578 579 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n", 580 keys.authkeylen + keys.enckeylen, keys.enckeylen, 581 keys.authkeylen); 582 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 583 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 584 585 /* 586 * If DKP is supported, use it in the shared descriptor to generate 587 * the split key. 588 */ 589 if (ctrlpriv->era >= 6) { 590 ctx->adata.keylen = keys.authkeylen; 591 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype & 592 OP_ALG_ALGSEL_MASK); 593 594 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE) 595 goto badkey; 596 597 memcpy(ctx->key, keys.authkey, keys.authkeylen); 598 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, 599 keys.enckeylen); 600 dma_sync_single_for_device(jrdev, ctx->key_dma, 601 ctx->adata.keylen_pad + 602 keys.enckeylen, ctx->dir); 603 goto skip_split_key; 604 } 605 606 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, 607 keys.authkeylen, CAAM_MAX_KEY_SIZE - 608 keys.enckeylen); 609 if (ret) { 610 goto badkey; 611 } 612 613 /* postpend encryption key to auth split key */ 614 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen); 615 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + 616 keys.enckeylen, ctx->dir); 617 618 print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ", 619 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, 620 ctx->adata.keylen_pad + keys.enckeylen, 1); 621 622 skip_split_key: 623 ctx->cdata.keylen = keys.enckeylen; 624 memzero_explicit(&keys, sizeof(keys)); 625 return aead_set_sh_desc(aead); 626 badkey: 627 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 628 memzero_explicit(&keys, sizeof(keys)); 629 return -EINVAL; 630 } 631 632 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key, 633 unsigned int keylen) 634 { 635 struct crypto_authenc_keys keys; 636 u32 flags; 637 int err; 638 639 err = crypto_authenc_extractkeys(&keys, key, keylen); 640 if (unlikely(err)) 641 goto badkey; 642 643 err = -EINVAL; 644 if (keys.enckeylen != DES3_EDE_KEY_SIZE) 645 goto badkey; 646 647 flags = crypto_aead_get_flags(aead); 648 err = __des3_verify_key(&flags, keys.enckey); 649 if (unlikely(err)) { 650 crypto_aead_set_flags(aead, flags); 651 goto out; 652 } 653 654 err = aead_setkey(aead, key, keylen); 655 656 out: 657 memzero_explicit(&keys, sizeof(keys)); 658 return err; 659 660 badkey: 661 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 662 goto out; 663 } 664 665 static int gcm_setkey(struct crypto_aead *aead, 666 const u8 *key, unsigned int keylen) 667 { 668 struct caam_ctx *ctx = crypto_aead_ctx(aead); 669 struct device *jrdev = ctx->jrdev; 670 671 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 672 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 673 674 memcpy(ctx->key, key, keylen); 675 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir); 676 ctx->cdata.keylen = keylen; 677 678 return gcm_set_sh_desc(aead); 679 } 680 681 static int rfc4106_setkey(struct crypto_aead *aead, 682 const u8 *key, unsigned int keylen) 683 { 684 struct caam_ctx *ctx = crypto_aead_ctx(aead); 685 struct device *jrdev = ctx->jrdev; 686 687 if (keylen < 4) 688 return -EINVAL; 689 690 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 691 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 692 693 memcpy(ctx->key, key, keylen); 694 695 /* 696 * The last four bytes of the key material are used as the salt value 697 * in the nonce. Update the AES key length. 698 */ 699 ctx->cdata.keylen = keylen - 4; 700 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 701 ctx->dir); 702 return rfc4106_set_sh_desc(aead); 703 } 704 705 static int rfc4543_setkey(struct crypto_aead *aead, 706 const u8 *key, unsigned int keylen) 707 { 708 struct caam_ctx *ctx = crypto_aead_ctx(aead); 709 struct device *jrdev = ctx->jrdev; 710 711 if (keylen < 4) 712 return -EINVAL; 713 714 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 715 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 716 717 memcpy(ctx->key, key, keylen); 718 719 /* 720 * The last four bytes of the key material are used as the salt value 721 * in the nonce. Update the AES key length. 722 */ 723 ctx->cdata.keylen = keylen - 4; 724 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 725 ctx->dir); 726 return rfc4543_set_sh_desc(aead); 727 } 728 729 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key, 730 unsigned int keylen) 731 { 732 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 733 struct caam_skcipher_alg *alg = 734 container_of(crypto_skcipher_alg(skcipher), typeof(*alg), 735 skcipher); 736 struct device *jrdev = ctx->jrdev; 737 unsigned int ivsize = crypto_skcipher_ivsize(skcipher); 738 u32 *desc; 739 u32 ctx1_iv_off = 0; 740 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 741 OP_ALG_AAI_CTR_MOD128); 742 const bool is_rfc3686 = alg->caam.rfc3686; 743 744 print_hex_dump_debug("key in @"__stringify(__LINE__)": ", 745 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 746 /* 747 * AES-CTR needs to load IV in CONTEXT1 reg 748 * at an offset of 128bits (16bytes) 749 * CONTEXT1[255:128] = IV 750 */ 751 if (ctr_mode) 752 ctx1_iv_off = 16; 753 754 /* 755 * RFC3686 specific: 756 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER} 757 * | *key = {KEY, NONCE} 758 */ 759 if (is_rfc3686) { 760 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 761 keylen -= CTR_RFC3686_NONCE_SIZE; 762 } 763 764 ctx->cdata.keylen = keylen; 765 ctx->cdata.key_virt = key; 766 ctx->cdata.key_inline = true; 767 768 /* skcipher_encrypt shared descriptor */ 769 desc = ctx->sh_desc_enc; 770 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686, 771 ctx1_iv_off); 772 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 773 desc_bytes(desc), ctx->dir); 774 775 /* skcipher_decrypt shared descriptor */ 776 desc = ctx->sh_desc_dec; 777 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686, 778 ctx1_iv_off); 779 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 780 desc_bytes(desc), ctx->dir); 781 782 return 0; 783 } 784 785 static int des_skcipher_setkey(struct crypto_skcipher *skcipher, 786 const u8 *key, unsigned int keylen) 787 { 788 u32 tmp[DES3_EDE_EXPKEY_WORDS]; 789 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 790 791 if (keylen == DES3_EDE_KEY_SIZE && 792 __des3_ede_setkey(tmp, &tfm->crt_flags, key, DES3_EDE_KEY_SIZE)) { 793 return -EINVAL; 794 } 795 796 if (!des_ekey(tmp, key) && (crypto_skcipher_get_flags(skcipher) & 797 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) { 798 crypto_skcipher_set_flags(skcipher, 799 CRYPTO_TFM_RES_WEAK_KEY); 800 return -EINVAL; 801 } 802 803 return skcipher_setkey(skcipher, key, keylen); 804 } 805 806 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key, 807 unsigned int keylen) 808 { 809 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 810 struct device *jrdev = ctx->jrdev; 811 u32 *desc; 812 813 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) { 814 crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN); 815 dev_err(jrdev, "key size mismatch\n"); 816 return -EINVAL; 817 } 818 819 ctx->cdata.keylen = keylen; 820 ctx->cdata.key_virt = key; 821 ctx->cdata.key_inline = true; 822 823 /* xts_skcipher_encrypt shared descriptor */ 824 desc = ctx->sh_desc_enc; 825 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata); 826 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 827 desc_bytes(desc), ctx->dir); 828 829 /* xts_skcipher_decrypt shared descriptor */ 830 desc = ctx->sh_desc_dec; 831 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata); 832 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 833 desc_bytes(desc), ctx->dir); 834 835 return 0; 836 } 837 838 /* 839 * aead_edesc - s/w-extended aead descriptor 840 * @src_nents: number of segments in input s/w scatterlist 841 * @dst_nents: number of segments in output s/w scatterlist 842 * @mapped_src_nents: number of segments in input h/w link table 843 * @mapped_dst_nents: number of segments in output h/w link table 844 * @sec4_sg_bytes: length of dma mapped sec4_sg space 845 * @sec4_sg_dma: bus physical mapped address of h/w link table 846 * @sec4_sg: pointer to h/w link table 847 * @hw_desc: the h/w job descriptor followed by any referenced link tables 848 */ 849 struct aead_edesc { 850 int src_nents; 851 int dst_nents; 852 int mapped_src_nents; 853 int mapped_dst_nents; 854 int sec4_sg_bytes; 855 dma_addr_t sec4_sg_dma; 856 struct sec4_sg_entry *sec4_sg; 857 u32 hw_desc[]; 858 }; 859 860 /* 861 * skcipher_edesc - s/w-extended skcipher descriptor 862 * @src_nents: number of segments in input s/w scatterlist 863 * @dst_nents: number of segments in output s/w scatterlist 864 * @mapped_src_nents: number of segments in input h/w link table 865 * @mapped_dst_nents: number of segments in output h/w link table 866 * @iv_dma: dma address of iv for checking continuity and link table 867 * @sec4_sg_bytes: length of dma mapped sec4_sg space 868 * @sec4_sg_dma: bus physical mapped address of h/w link table 869 * @sec4_sg: pointer to h/w link table 870 * @hw_desc: the h/w job descriptor followed by any referenced link tables 871 * and IV 872 */ 873 struct skcipher_edesc { 874 int src_nents; 875 int dst_nents; 876 int mapped_src_nents; 877 int mapped_dst_nents; 878 dma_addr_t iv_dma; 879 int sec4_sg_bytes; 880 dma_addr_t sec4_sg_dma; 881 struct sec4_sg_entry *sec4_sg; 882 u32 hw_desc[0]; 883 }; 884 885 static void caam_unmap(struct device *dev, struct scatterlist *src, 886 struct scatterlist *dst, int src_nents, 887 int dst_nents, 888 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma, 889 int sec4_sg_bytes) 890 { 891 if (dst != src) { 892 if (src_nents) 893 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); 894 if (dst_nents) 895 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); 896 } else { 897 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); 898 } 899 900 if (iv_dma) 901 dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL); 902 if (sec4_sg_bytes) 903 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes, 904 DMA_TO_DEVICE); 905 } 906 907 static void aead_unmap(struct device *dev, 908 struct aead_edesc *edesc, 909 struct aead_request *req) 910 { 911 caam_unmap(dev, req->src, req->dst, 912 edesc->src_nents, edesc->dst_nents, 0, 0, 913 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 914 } 915 916 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, 917 struct skcipher_request *req) 918 { 919 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 920 int ivsize = crypto_skcipher_ivsize(skcipher); 921 922 caam_unmap(dev, req->src, req->dst, 923 edesc->src_nents, edesc->dst_nents, 924 edesc->iv_dma, ivsize, 925 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 926 } 927 928 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 929 void *context) 930 { 931 struct aead_request *req = context; 932 struct aead_edesc *edesc; 933 934 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 935 936 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 937 938 if (err) 939 caam_jr_strstatus(jrdev, err); 940 941 aead_unmap(jrdev, edesc, req); 942 943 kfree(edesc); 944 945 aead_request_complete(req, err); 946 } 947 948 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 949 void *context) 950 { 951 struct aead_request *req = context; 952 struct aead_edesc *edesc; 953 954 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 955 956 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 957 958 if (err) 959 caam_jr_strstatus(jrdev, err); 960 961 aead_unmap(jrdev, edesc, req); 962 963 /* 964 * verify hw auth check passed else return -EBADMSG 965 */ 966 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK) 967 err = -EBADMSG; 968 969 kfree(edesc); 970 971 aead_request_complete(req, err); 972 } 973 974 static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 975 void *context) 976 { 977 struct skcipher_request *req = context; 978 struct skcipher_edesc *edesc; 979 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 980 int ivsize = crypto_skcipher_ivsize(skcipher); 981 982 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 983 984 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]); 985 986 if (err) 987 caam_jr_strstatus(jrdev, err); 988 989 skcipher_unmap(jrdev, edesc, req); 990 991 /* 992 * The crypto API expects us to set the IV (req->iv) to the last 993 * ciphertext block (CBC mode) or last counter (CTR mode). 994 * This is used e.g. by the CTS mode. 995 */ 996 if (ivsize) { 997 memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes, 998 ivsize); 999 1000 print_hex_dump_debug("dstiv @"__stringify(__LINE__)": ", 1001 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, 1002 edesc->src_nents > 1 ? 100 : ivsize, 1); 1003 } 1004 1005 caam_dump_sg("dst @" __stringify(__LINE__)": ", 1006 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 1007 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); 1008 1009 kfree(edesc); 1010 1011 skcipher_request_complete(req, err); 1012 } 1013 1014 static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 1015 void *context) 1016 { 1017 struct skcipher_request *req = context; 1018 struct skcipher_edesc *edesc; 1019 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1020 int ivsize = crypto_skcipher_ivsize(skcipher); 1021 1022 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 1023 1024 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]); 1025 if (err) 1026 caam_jr_strstatus(jrdev, err); 1027 1028 skcipher_unmap(jrdev, edesc, req); 1029 1030 /* 1031 * The crypto API expects us to set the IV (req->iv) to the last 1032 * ciphertext block (CBC mode) or last counter (CTR mode). 1033 * This is used e.g. by the CTS mode. 1034 */ 1035 if (ivsize) { 1036 memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes, 1037 ivsize); 1038 1039 print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ", 1040 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, 1041 ivsize, 1); 1042 } 1043 1044 caam_dump_sg("dst @" __stringify(__LINE__)": ", 1045 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 1046 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); 1047 1048 kfree(edesc); 1049 1050 skcipher_request_complete(req, err); 1051 } 1052 1053 /* 1054 * Fill in aead job descriptor 1055 */ 1056 static void init_aead_job(struct aead_request *req, 1057 struct aead_edesc *edesc, 1058 bool all_contig, bool encrypt) 1059 { 1060 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1061 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1062 int authsize = ctx->authsize; 1063 u32 *desc = edesc->hw_desc; 1064 u32 out_options, in_options; 1065 dma_addr_t dst_dma, src_dma; 1066 int len, sec4_sg_index = 0; 1067 dma_addr_t ptr; 1068 u32 *sh_desc; 1069 1070 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 1071 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 1072 1073 len = desc_len(sh_desc); 1074 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1075 1076 if (all_contig) { 1077 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) : 1078 0; 1079 in_options = 0; 1080 } else { 1081 src_dma = edesc->sec4_sg_dma; 1082 sec4_sg_index += edesc->mapped_src_nents; 1083 in_options = LDST_SGF; 1084 } 1085 1086 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, 1087 in_options); 1088 1089 dst_dma = src_dma; 1090 out_options = in_options; 1091 1092 if (unlikely(req->src != req->dst)) { 1093 if (!edesc->mapped_dst_nents) { 1094 dst_dma = 0; 1095 out_options = 0; 1096 } else if (edesc->mapped_dst_nents == 1) { 1097 dst_dma = sg_dma_address(req->dst); 1098 out_options = 0; 1099 } else { 1100 dst_dma = edesc->sec4_sg_dma + 1101 sec4_sg_index * 1102 sizeof(struct sec4_sg_entry); 1103 out_options = LDST_SGF; 1104 } 1105 } 1106 1107 if (encrypt) 1108 append_seq_out_ptr(desc, dst_dma, 1109 req->assoclen + req->cryptlen + authsize, 1110 out_options); 1111 else 1112 append_seq_out_ptr(desc, dst_dma, 1113 req->assoclen + req->cryptlen - authsize, 1114 out_options); 1115 } 1116 1117 static void init_gcm_job(struct aead_request *req, 1118 struct aead_edesc *edesc, 1119 bool all_contig, bool encrypt) 1120 { 1121 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1122 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1123 unsigned int ivsize = crypto_aead_ivsize(aead); 1124 u32 *desc = edesc->hw_desc; 1125 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE); 1126 unsigned int last; 1127 1128 init_aead_job(req, edesc, all_contig, encrypt); 1129 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1130 1131 /* BUG This should not be specific to generic GCM. */ 1132 last = 0; 1133 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen)) 1134 last = FIFOLD_TYPE_LAST1; 1135 1136 /* Read GCM IV */ 1137 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE | 1138 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last); 1139 /* Append Salt */ 1140 if (!generic_gcm) 1141 append_data(desc, ctx->key + ctx->cdata.keylen, 4); 1142 /* Append IV */ 1143 append_data(desc, req->iv, ivsize); 1144 /* End of blank commands */ 1145 } 1146 1147 static void init_chachapoly_job(struct aead_request *req, 1148 struct aead_edesc *edesc, bool all_contig, 1149 bool encrypt) 1150 { 1151 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1152 unsigned int ivsize = crypto_aead_ivsize(aead); 1153 unsigned int assoclen = req->assoclen; 1154 u32 *desc = edesc->hw_desc; 1155 u32 ctx_iv_off = 4; 1156 1157 init_aead_job(req, edesc, all_contig, encrypt); 1158 1159 if (ivsize != CHACHAPOLY_IV_SIZE) { 1160 /* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */ 1161 ctx_iv_off += 4; 1162 1163 /* 1164 * The associated data comes already with the IV but we need 1165 * to skip it when we authenticate or encrypt... 1166 */ 1167 assoclen -= ivsize; 1168 } 1169 1170 append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen); 1171 1172 /* 1173 * For IPsec load the IV further in the same register. 1174 * For RFC7539 simply load the 12 bytes nonce in a single operation 1175 */ 1176 append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB | 1177 LDST_SRCDST_BYTE_CONTEXT | 1178 ctx_iv_off << LDST_OFFSET_SHIFT); 1179 } 1180 1181 static void init_authenc_job(struct aead_request *req, 1182 struct aead_edesc *edesc, 1183 bool all_contig, bool encrypt) 1184 { 1185 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1186 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 1187 struct caam_aead_alg, aead); 1188 unsigned int ivsize = crypto_aead_ivsize(aead); 1189 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1190 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent); 1191 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 1192 OP_ALG_AAI_CTR_MOD128); 1193 const bool is_rfc3686 = alg->caam.rfc3686; 1194 u32 *desc = edesc->hw_desc; 1195 u32 ivoffset = 0; 1196 1197 /* 1198 * AES-CTR needs to load IV in CONTEXT1 reg 1199 * at an offset of 128bits (16bytes) 1200 * CONTEXT1[255:128] = IV 1201 */ 1202 if (ctr_mode) 1203 ivoffset = 16; 1204 1205 /* 1206 * RFC3686 specific: 1207 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 1208 */ 1209 if (is_rfc3686) 1210 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE; 1211 1212 init_aead_job(req, edesc, all_contig, encrypt); 1213 1214 /* 1215 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports 1216 * having DPOVRD as destination. 1217 */ 1218 if (ctrlpriv->era < 3) 1219 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1220 else 1221 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen); 1222 1223 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv)) 1224 append_load_as_imm(desc, req->iv, ivsize, 1225 LDST_CLASS_1_CCB | 1226 LDST_SRCDST_BYTE_CONTEXT | 1227 (ivoffset << LDST_OFFSET_SHIFT)); 1228 } 1229 1230 /* 1231 * Fill in skcipher job descriptor 1232 */ 1233 static void init_skcipher_job(struct skcipher_request *req, 1234 struct skcipher_edesc *edesc, 1235 const bool encrypt) 1236 { 1237 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1238 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1239 struct device *jrdev = ctx->jrdev; 1240 int ivsize = crypto_skcipher_ivsize(skcipher); 1241 u32 *desc = edesc->hw_desc; 1242 u32 *sh_desc; 1243 u32 in_options = 0, out_options = 0; 1244 dma_addr_t src_dma, dst_dma, ptr; 1245 int len, sec4_sg_index = 0; 1246 1247 print_hex_dump_debug("presciv@"__stringify(__LINE__)": ", 1248 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1); 1249 dev_dbg(jrdev, "asked=%d, cryptlen%d\n", 1250 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); 1251 1252 caam_dump_sg("src @" __stringify(__LINE__)": ", 1253 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1254 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); 1255 1256 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 1257 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 1258 1259 len = desc_len(sh_desc); 1260 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1261 1262 if (ivsize || edesc->mapped_src_nents > 1) { 1263 src_dma = edesc->sec4_sg_dma; 1264 sec4_sg_index = edesc->mapped_src_nents + !!ivsize; 1265 in_options = LDST_SGF; 1266 } else { 1267 src_dma = sg_dma_address(req->src); 1268 } 1269 1270 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options); 1271 1272 if (likely(req->src == req->dst)) { 1273 dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry); 1274 out_options = in_options; 1275 } else if (!ivsize && edesc->mapped_dst_nents == 1) { 1276 dst_dma = sg_dma_address(req->dst); 1277 } else { 1278 dst_dma = edesc->sec4_sg_dma + sec4_sg_index * 1279 sizeof(struct sec4_sg_entry); 1280 out_options = LDST_SGF; 1281 } 1282 1283 append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options); 1284 } 1285 1286 /* 1287 * allocate and map the aead extended descriptor 1288 */ 1289 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req, 1290 int desc_bytes, bool *all_contig_ptr, 1291 bool encrypt) 1292 { 1293 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1294 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1295 struct device *jrdev = ctx->jrdev; 1296 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1297 GFP_KERNEL : GFP_ATOMIC; 1298 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1299 int src_len, dst_len = 0; 1300 struct aead_edesc *edesc; 1301 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes; 1302 unsigned int authsize = ctx->authsize; 1303 1304 if (unlikely(req->dst != req->src)) { 1305 src_len = req->assoclen + req->cryptlen; 1306 dst_len = src_len + (encrypt ? authsize : (-authsize)); 1307 1308 src_nents = sg_nents_for_len(req->src, src_len); 1309 if (unlikely(src_nents < 0)) { 1310 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1311 src_len); 1312 return ERR_PTR(src_nents); 1313 } 1314 1315 dst_nents = sg_nents_for_len(req->dst, dst_len); 1316 if (unlikely(dst_nents < 0)) { 1317 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1318 dst_len); 1319 return ERR_PTR(dst_nents); 1320 } 1321 } else { 1322 src_len = req->assoclen + req->cryptlen + 1323 (encrypt ? authsize : 0); 1324 1325 src_nents = sg_nents_for_len(req->src, src_len); 1326 if (unlikely(src_nents < 0)) { 1327 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1328 src_len); 1329 return ERR_PTR(src_nents); 1330 } 1331 } 1332 1333 if (likely(req->src == req->dst)) { 1334 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1335 DMA_BIDIRECTIONAL); 1336 if (unlikely(!mapped_src_nents)) { 1337 dev_err(jrdev, "unable to map source\n"); 1338 return ERR_PTR(-ENOMEM); 1339 } 1340 } else { 1341 /* Cover also the case of null (zero length) input data */ 1342 if (src_nents) { 1343 mapped_src_nents = dma_map_sg(jrdev, req->src, 1344 src_nents, DMA_TO_DEVICE); 1345 if (unlikely(!mapped_src_nents)) { 1346 dev_err(jrdev, "unable to map source\n"); 1347 return ERR_PTR(-ENOMEM); 1348 } 1349 } else { 1350 mapped_src_nents = 0; 1351 } 1352 1353 /* Cover also the case of null (zero length) output data */ 1354 if (dst_nents) { 1355 mapped_dst_nents = dma_map_sg(jrdev, req->dst, 1356 dst_nents, 1357 DMA_FROM_DEVICE); 1358 if (unlikely(!mapped_dst_nents)) { 1359 dev_err(jrdev, "unable to map destination\n"); 1360 dma_unmap_sg(jrdev, req->src, src_nents, 1361 DMA_TO_DEVICE); 1362 return ERR_PTR(-ENOMEM); 1363 } 1364 } else { 1365 mapped_dst_nents = 0; 1366 } 1367 } 1368 1369 /* 1370 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond 1371 * the end of the table by allocating more S/G entries. 1372 */ 1373 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0; 1374 if (mapped_dst_nents > 1) 1375 sec4_sg_len += pad_sg_nents(mapped_dst_nents); 1376 else 1377 sec4_sg_len = pad_sg_nents(sec4_sg_len); 1378 1379 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry); 1380 1381 /* allocate space for base edesc and hw desc commands, link tables */ 1382 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1383 GFP_DMA | flags); 1384 if (!edesc) { 1385 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1386 0, 0, 0); 1387 return ERR_PTR(-ENOMEM); 1388 } 1389 1390 edesc->src_nents = src_nents; 1391 edesc->dst_nents = dst_nents; 1392 edesc->mapped_src_nents = mapped_src_nents; 1393 edesc->mapped_dst_nents = mapped_dst_nents; 1394 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + 1395 desc_bytes; 1396 *all_contig_ptr = !(mapped_src_nents > 1); 1397 1398 sec4_sg_index = 0; 1399 if (mapped_src_nents > 1) { 1400 sg_to_sec4_sg_last(req->src, src_len, 1401 edesc->sec4_sg + sec4_sg_index, 0); 1402 sec4_sg_index += mapped_src_nents; 1403 } 1404 if (mapped_dst_nents > 1) { 1405 sg_to_sec4_sg_last(req->dst, dst_len, 1406 edesc->sec4_sg + sec4_sg_index, 0); 1407 } 1408 1409 if (!sec4_sg_bytes) 1410 return edesc; 1411 1412 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1413 sec4_sg_bytes, DMA_TO_DEVICE); 1414 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1415 dev_err(jrdev, "unable to map S/G table\n"); 1416 aead_unmap(jrdev, edesc, req); 1417 kfree(edesc); 1418 return ERR_PTR(-ENOMEM); 1419 } 1420 1421 edesc->sec4_sg_bytes = sec4_sg_bytes; 1422 1423 return edesc; 1424 } 1425 1426 static int gcm_encrypt(struct aead_request *req) 1427 { 1428 struct aead_edesc *edesc; 1429 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1430 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1431 struct device *jrdev = ctx->jrdev; 1432 bool all_contig; 1433 u32 *desc; 1434 int ret = 0; 1435 1436 /* allocate extended descriptor */ 1437 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true); 1438 if (IS_ERR(edesc)) 1439 return PTR_ERR(edesc); 1440 1441 /* Create and submit job descriptor */ 1442 init_gcm_job(req, edesc, all_contig, true); 1443 1444 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ", 1445 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1446 desc_bytes(edesc->hw_desc), 1); 1447 1448 desc = edesc->hw_desc; 1449 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1450 if (!ret) { 1451 ret = -EINPROGRESS; 1452 } else { 1453 aead_unmap(jrdev, edesc, req); 1454 kfree(edesc); 1455 } 1456 1457 return ret; 1458 } 1459 1460 static int chachapoly_encrypt(struct aead_request *req) 1461 { 1462 struct aead_edesc *edesc; 1463 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1464 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1465 struct device *jrdev = ctx->jrdev; 1466 bool all_contig; 1467 u32 *desc; 1468 int ret; 1469 1470 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig, 1471 true); 1472 if (IS_ERR(edesc)) 1473 return PTR_ERR(edesc); 1474 1475 desc = edesc->hw_desc; 1476 1477 init_chachapoly_job(req, edesc, all_contig, true); 1478 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ", 1479 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1480 1); 1481 1482 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1483 if (!ret) { 1484 ret = -EINPROGRESS; 1485 } else { 1486 aead_unmap(jrdev, edesc, req); 1487 kfree(edesc); 1488 } 1489 1490 return ret; 1491 } 1492 1493 static int chachapoly_decrypt(struct aead_request *req) 1494 { 1495 struct aead_edesc *edesc; 1496 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1497 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1498 struct device *jrdev = ctx->jrdev; 1499 bool all_contig; 1500 u32 *desc; 1501 int ret; 1502 1503 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig, 1504 false); 1505 if (IS_ERR(edesc)) 1506 return PTR_ERR(edesc); 1507 1508 desc = edesc->hw_desc; 1509 1510 init_chachapoly_job(req, edesc, all_contig, false); 1511 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ", 1512 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1513 1); 1514 1515 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1516 if (!ret) { 1517 ret = -EINPROGRESS; 1518 } else { 1519 aead_unmap(jrdev, edesc, req); 1520 kfree(edesc); 1521 } 1522 1523 return ret; 1524 } 1525 1526 static int ipsec_gcm_encrypt(struct aead_request *req) 1527 { 1528 if (req->assoclen < 8) 1529 return -EINVAL; 1530 1531 return gcm_encrypt(req); 1532 } 1533 1534 static int aead_encrypt(struct aead_request *req) 1535 { 1536 struct aead_edesc *edesc; 1537 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1538 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1539 struct device *jrdev = ctx->jrdev; 1540 bool all_contig; 1541 u32 *desc; 1542 int ret = 0; 1543 1544 /* allocate extended descriptor */ 1545 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1546 &all_contig, true); 1547 if (IS_ERR(edesc)) 1548 return PTR_ERR(edesc); 1549 1550 /* Create and submit job descriptor */ 1551 init_authenc_job(req, edesc, all_contig, true); 1552 1553 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ", 1554 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1555 desc_bytes(edesc->hw_desc), 1); 1556 1557 desc = edesc->hw_desc; 1558 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1559 if (!ret) { 1560 ret = -EINPROGRESS; 1561 } else { 1562 aead_unmap(jrdev, edesc, req); 1563 kfree(edesc); 1564 } 1565 1566 return ret; 1567 } 1568 1569 static int gcm_decrypt(struct aead_request *req) 1570 { 1571 struct aead_edesc *edesc; 1572 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1573 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1574 struct device *jrdev = ctx->jrdev; 1575 bool all_contig; 1576 u32 *desc; 1577 int ret = 0; 1578 1579 /* allocate extended descriptor */ 1580 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false); 1581 if (IS_ERR(edesc)) 1582 return PTR_ERR(edesc); 1583 1584 /* Create and submit job descriptor*/ 1585 init_gcm_job(req, edesc, all_contig, false); 1586 1587 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ", 1588 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1589 desc_bytes(edesc->hw_desc), 1); 1590 1591 desc = edesc->hw_desc; 1592 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1593 if (!ret) { 1594 ret = -EINPROGRESS; 1595 } else { 1596 aead_unmap(jrdev, edesc, req); 1597 kfree(edesc); 1598 } 1599 1600 return ret; 1601 } 1602 1603 static int ipsec_gcm_decrypt(struct aead_request *req) 1604 { 1605 if (req->assoclen < 8) 1606 return -EINVAL; 1607 1608 return gcm_decrypt(req); 1609 } 1610 1611 static int aead_decrypt(struct aead_request *req) 1612 { 1613 struct aead_edesc *edesc; 1614 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1615 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1616 struct device *jrdev = ctx->jrdev; 1617 bool all_contig; 1618 u32 *desc; 1619 int ret = 0; 1620 1621 caam_dump_sg("dec src@" __stringify(__LINE__)": ", 1622 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1623 req->assoclen + req->cryptlen, 1); 1624 1625 /* allocate extended descriptor */ 1626 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1627 &all_contig, false); 1628 if (IS_ERR(edesc)) 1629 return PTR_ERR(edesc); 1630 1631 /* Create and submit job descriptor*/ 1632 init_authenc_job(req, edesc, all_contig, false); 1633 1634 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ", 1635 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1636 desc_bytes(edesc->hw_desc), 1); 1637 1638 desc = edesc->hw_desc; 1639 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1640 if (!ret) { 1641 ret = -EINPROGRESS; 1642 } else { 1643 aead_unmap(jrdev, edesc, req); 1644 kfree(edesc); 1645 } 1646 1647 return ret; 1648 } 1649 1650 /* 1651 * allocate and map the skcipher extended descriptor for skcipher 1652 */ 1653 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req, 1654 int desc_bytes) 1655 { 1656 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1657 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1658 struct device *jrdev = ctx->jrdev; 1659 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1660 GFP_KERNEL : GFP_ATOMIC; 1661 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1662 struct skcipher_edesc *edesc; 1663 dma_addr_t iv_dma = 0; 1664 u8 *iv; 1665 int ivsize = crypto_skcipher_ivsize(skcipher); 1666 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1667 1668 src_nents = sg_nents_for_len(req->src, req->cryptlen); 1669 if (unlikely(src_nents < 0)) { 1670 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1671 req->cryptlen); 1672 return ERR_PTR(src_nents); 1673 } 1674 1675 if (req->dst != req->src) { 1676 dst_nents = sg_nents_for_len(req->dst, req->cryptlen); 1677 if (unlikely(dst_nents < 0)) { 1678 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1679 req->cryptlen); 1680 return ERR_PTR(dst_nents); 1681 } 1682 } 1683 1684 if (likely(req->src == req->dst)) { 1685 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1686 DMA_BIDIRECTIONAL); 1687 if (unlikely(!mapped_src_nents)) { 1688 dev_err(jrdev, "unable to map source\n"); 1689 return ERR_PTR(-ENOMEM); 1690 } 1691 } else { 1692 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1693 DMA_TO_DEVICE); 1694 if (unlikely(!mapped_src_nents)) { 1695 dev_err(jrdev, "unable to map source\n"); 1696 return ERR_PTR(-ENOMEM); 1697 } 1698 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1699 DMA_FROM_DEVICE); 1700 if (unlikely(!mapped_dst_nents)) { 1701 dev_err(jrdev, "unable to map destination\n"); 1702 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1703 return ERR_PTR(-ENOMEM); 1704 } 1705 } 1706 1707 if (!ivsize && mapped_src_nents == 1) 1708 sec4_sg_ents = 0; // no need for an input hw s/g table 1709 else 1710 sec4_sg_ents = mapped_src_nents + !!ivsize; 1711 dst_sg_idx = sec4_sg_ents; 1712 1713 /* 1714 * Input, output HW S/G tables: [IV, src][dst, IV] 1715 * IV entries point to the same buffer 1716 * If src == dst, S/G entries are reused (S/G tables overlap) 1717 * 1718 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond 1719 * the end of the table by allocating more S/G entries. Logic: 1720 * if (output S/G) 1721 * pad output S/G, if needed 1722 * else if (input S/G) ... 1723 * pad input S/G, if needed 1724 */ 1725 if (ivsize || mapped_dst_nents > 1) { 1726 if (req->src == req->dst) 1727 sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents); 1728 else 1729 sec4_sg_ents += pad_sg_nents(mapped_dst_nents + 1730 !!ivsize); 1731 } else { 1732 sec4_sg_ents = pad_sg_nents(sec4_sg_ents); 1733 } 1734 1735 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry); 1736 1737 /* 1738 * allocate space for base edesc and hw desc commands, link tables, IV 1739 */ 1740 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize, 1741 GFP_DMA | flags); 1742 if (!edesc) { 1743 dev_err(jrdev, "could not allocate extended descriptor\n"); 1744 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1745 0, 0, 0); 1746 return ERR_PTR(-ENOMEM); 1747 } 1748 1749 edesc->src_nents = src_nents; 1750 edesc->dst_nents = dst_nents; 1751 edesc->mapped_src_nents = mapped_src_nents; 1752 edesc->mapped_dst_nents = mapped_dst_nents; 1753 edesc->sec4_sg_bytes = sec4_sg_bytes; 1754 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc + 1755 desc_bytes); 1756 1757 /* Make sure IV is located in a DMAable area */ 1758 if (ivsize) { 1759 iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes; 1760 memcpy(iv, req->iv, ivsize); 1761 1762 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL); 1763 if (dma_mapping_error(jrdev, iv_dma)) { 1764 dev_err(jrdev, "unable to map IV\n"); 1765 caam_unmap(jrdev, req->src, req->dst, src_nents, 1766 dst_nents, 0, 0, 0, 0); 1767 kfree(edesc); 1768 return ERR_PTR(-ENOMEM); 1769 } 1770 1771 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); 1772 } 1773 if (dst_sg_idx) 1774 sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg + 1775 !!ivsize, 0); 1776 1777 if (req->src != req->dst && (ivsize || mapped_dst_nents > 1)) 1778 sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg + 1779 dst_sg_idx, 0); 1780 1781 if (ivsize) 1782 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx + 1783 mapped_dst_nents, iv_dma, ivsize, 0); 1784 1785 if (ivsize || mapped_dst_nents > 1) 1786 sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx + 1787 mapped_dst_nents); 1788 1789 if (sec4_sg_bytes) { 1790 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1791 sec4_sg_bytes, 1792 DMA_TO_DEVICE); 1793 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1794 dev_err(jrdev, "unable to map S/G table\n"); 1795 caam_unmap(jrdev, req->src, req->dst, src_nents, 1796 dst_nents, iv_dma, ivsize, 0, 0); 1797 kfree(edesc); 1798 return ERR_PTR(-ENOMEM); 1799 } 1800 } 1801 1802 edesc->iv_dma = iv_dma; 1803 1804 print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ", 1805 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, 1806 sec4_sg_bytes, 1); 1807 1808 return edesc; 1809 } 1810 1811 static int skcipher_encrypt(struct skcipher_request *req) 1812 { 1813 struct skcipher_edesc *edesc; 1814 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1815 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1816 struct device *jrdev = ctx->jrdev; 1817 u32 *desc; 1818 int ret = 0; 1819 1820 /* allocate extended descriptor */ 1821 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ); 1822 if (IS_ERR(edesc)) 1823 return PTR_ERR(edesc); 1824 1825 /* Create and submit job descriptor*/ 1826 init_skcipher_job(req, edesc, true); 1827 1828 print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ", 1829 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1830 desc_bytes(edesc->hw_desc), 1); 1831 1832 desc = edesc->hw_desc; 1833 ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req); 1834 1835 if (!ret) { 1836 ret = -EINPROGRESS; 1837 } else { 1838 skcipher_unmap(jrdev, edesc, req); 1839 kfree(edesc); 1840 } 1841 1842 return ret; 1843 } 1844 1845 static int skcipher_decrypt(struct skcipher_request *req) 1846 { 1847 struct skcipher_edesc *edesc; 1848 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1849 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1850 struct device *jrdev = ctx->jrdev; 1851 u32 *desc; 1852 int ret = 0; 1853 1854 /* allocate extended descriptor */ 1855 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ); 1856 if (IS_ERR(edesc)) 1857 return PTR_ERR(edesc); 1858 1859 /* Create and submit job descriptor*/ 1860 init_skcipher_job(req, edesc, false); 1861 desc = edesc->hw_desc; 1862 1863 print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ", 1864 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1865 desc_bytes(edesc->hw_desc), 1); 1866 1867 ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req); 1868 if (!ret) { 1869 ret = -EINPROGRESS; 1870 } else { 1871 skcipher_unmap(jrdev, edesc, req); 1872 kfree(edesc); 1873 } 1874 1875 return ret; 1876 } 1877 1878 static struct caam_skcipher_alg driver_algs[] = { 1879 { 1880 .skcipher = { 1881 .base = { 1882 .cra_name = "cbc(aes)", 1883 .cra_driver_name = "cbc-aes-caam", 1884 .cra_blocksize = AES_BLOCK_SIZE, 1885 }, 1886 .setkey = skcipher_setkey, 1887 .encrypt = skcipher_encrypt, 1888 .decrypt = skcipher_decrypt, 1889 .min_keysize = AES_MIN_KEY_SIZE, 1890 .max_keysize = AES_MAX_KEY_SIZE, 1891 .ivsize = AES_BLOCK_SIZE, 1892 }, 1893 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1894 }, 1895 { 1896 .skcipher = { 1897 .base = { 1898 .cra_name = "cbc(des3_ede)", 1899 .cra_driver_name = "cbc-3des-caam", 1900 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 1901 }, 1902 .setkey = des_skcipher_setkey, 1903 .encrypt = skcipher_encrypt, 1904 .decrypt = skcipher_decrypt, 1905 .min_keysize = DES3_EDE_KEY_SIZE, 1906 .max_keysize = DES3_EDE_KEY_SIZE, 1907 .ivsize = DES3_EDE_BLOCK_SIZE, 1908 }, 1909 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 1910 }, 1911 { 1912 .skcipher = { 1913 .base = { 1914 .cra_name = "cbc(des)", 1915 .cra_driver_name = "cbc-des-caam", 1916 .cra_blocksize = DES_BLOCK_SIZE, 1917 }, 1918 .setkey = des_skcipher_setkey, 1919 .encrypt = skcipher_encrypt, 1920 .decrypt = skcipher_decrypt, 1921 .min_keysize = DES_KEY_SIZE, 1922 .max_keysize = DES_KEY_SIZE, 1923 .ivsize = DES_BLOCK_SIZE, 1924 }, 1925 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 1926 }, 1927 { 1928 .skcipher = { 1929 .base = { 1930 .cra_name = "ctr(aes)", 1931 .cra_driver_name = "ctr-aes-caam", 1932 .cra_blocksize = 1, 1933 }, 1934 .setkey = skcipher_setkey, 1935 .encrypt = skcipher_encrypt, 1936 .decrypt = skcipher_decrypt, 1937 .min_keysize = AES_MIN_KEY_SIZE, 1938 .max_keysize = AES_MAX_KEY_SIZE, 1939 .ivsize = AES_BLOCK_SIZE, 1940 .chunksize = AES_BLOCK_SIZE, 1941 }, 1942 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | 1943 OP_ALG_AAI_CTR_MOD128, 1944 }, 1945 { 1946 .skcipher = { 1947 .base = { 1948 .cra_name = "rfc3686(ctr(aes))", 1949 .cra_driver_name = "rfc3686-ctr-aes-caam", 1950 .cra_blocksize = 1, 1951 }, 1952 .setkey = skcipher_setkey, 1953 .encrypt = skcipher_encrypt, 1954 .decrypt = skcipher_decrypt, 1955 .min_keysize = AES_MIN_KEY_SIZE + 1956 CTR_RFC3686_NONCE_SIZE, 1957 .max_keysize = AES_MAX_KEY_SIZE + 1958 CTR_RFC3686_NONCE_SIZE, 1959 .ivsize = CTR_RFC3686_IV_SIZE, 1960 .chunksize = AES_BLOCK_SIZE, 1961 }, 1962 .caam = { 1963 .class1_alg_type = OP_ALG_ALGSEL_AES | 1964 OP_ALG_AAI_CTR_MOD128, 1965 .rfc3686 = true, 1966 }, 1967 }, 1968 { 1969 .skcipher = { 1970 .base = { 1971 .cra_name = "xts(aes)", 1972 .cra_driver_name = "xts-aes-caam", 1973 .cra_blocksize = AES_BLOCK_SIZE, 1974 }, 1975 .setkey = xts_skcipher_setkey, 1976 .encrypt = skcipher_encrypt, 1977 .decrypt = skcipher_decrypt, 1978 .min_keysize = 2 * AES_MIN_KEY_SIZE, 1979 .max_keysize = 2 * AES_MAX_KEY_SIZE, 1980 .ivsize = AES_BLOCK_SIZE, 1981 }, 1982 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS, 1983 }, 1984 { 1985 .skcipher = { 1986 .base = { 1987 .cra_name = "ecb(des)", 1988 .cra_driver_name = "ecb-des-caam", 1989 .cra_blocksize = DES_BLOCK_SIZE, 1990 }, 1991 .setkey = des_skcipher_setkey, 1992 .encrypt = skcipher_encrypt, 1993 .decrypt = skcipher_decrypt, 1994 .min_keysize = DES_KEY_SIZE, 1995 .max_keysize = DES_KEY_SIZE, 1996 }, 1997 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB, 1998 }, 1999 { 2000 .skcipher = { 2001 .base = { 2002 .cra_name = "ecb(aes)", 2003 .cra_driver_name = "ecb-aes-caam", 2004 .cra_blocksize = AES_BLOCK_SIZE, 2005 }, 2006 .setkey = skcipher_setkey, 2007 .encrypt = skcipher_encrypt, 2008 .decrypt = skcipher_decrypt, 2009 .min_keysize = AES_MIN_KEY_SIZE, 2010 .max_keysize = AES_MAX_KEY_SIZE, 2011 }, 2012 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB, 2013 }, 2014 { 2015 .skcipher = { 2016 .base = { 2017 .cra_name = "ecb(des3_ede)", 2018 .cra_driver_name = "ecb-des3-caam", 2019 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2020 }, 2021 .setkey = des_skcipher_setkey, 2022 .encrypt = skcipher_encrypt, 2023 .decrypt = skcipher_decrypt, 2024 .min_keysize = DES3_EDE_KEY_SIZE, 2025 .max_keysize = DES3_EDE_KEY_SIZE, 2026 }, 2027 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB, 2028 }, 2029 { 2030 .skcipher = { 2031 .base = { 2032 .cra_name = "ecb(arc4)", 2033 .cra_driver_name = "ecb-arc4-caam", 2034 .cra_blocksize = ARC4_BLOCK_SIZE, 2035 }, 2036 .setkey = skcipher_setkey, 2037 .encrypt = skcipher_encrypt, 2038 .decrypt = skcipher_decrypt, 2039 .min_keysize = ARC4_MIN_KEY_SIZE, 2040 .max_keysize = ARC4_MAX_KEY_SIZE, 2041 }, 2042 .caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB, 2043 }, 2044 }; 2045 2046 static struct caam_aead_alg driver_aeads[] = { 2047 { 2048 .aead = { 2049 .base = { 2050 .cra_name = "rfc4106(gcm(aes))", 2051 .cra_driver_name = "rfc4106-gcm-aes-caam", 2052 .cra_blocksize = 1, 2053 }, 2054 .setkey = rfc4106_setkey, 2055 .setauthsize = rfc4106_setauthsize, 2056 .encrypt = ipsec_gcm_encrypt, 2057 .decrypt = ipsec_gcm_decrypt, 2058 .ivsize = GCM_RFC4106_IV_SIZE, 2059 .maxauthsize = AES_BLOCK_SIZE, 2060 }, 2061 .caam = { 2062 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2063 .nodkp = true, 2064 }, 2065 }, 2066 { 2067 .aead = { 2068 .base = { 2069 .cra_name = "rfc4543(gcm(aes))", 2070 .cra_driver_name = "rfc4543-gcm-aes-caam", 2071 .cra_blocksize = 1, 2072 }, 2073 .setkey = rfc4543_setkey, 2074 .setauthsize = rfc4543_setauthsize, 2075 .encrypt = ipsec_gcm_encrypt, 2076 .decrypt = ipsec_gcm_decrypt, 2077 .ivsize = GCM_RFC4543_IV_SIZE, 2078 .maxauthsize = AES_BLOCK_SIZE, 2079 }, 2080 .caam = { 2081 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2082 .nodkp = true, 2083 }, 2084 }, 2085 /* Galois Counter Mode */ 2086 { 2087 .aead = { 2088 .base = { 2089 .cra_name = "gcm(aes)", 2090 .cra_driver_name = "gcm-aes-caam", 2091 .cra_blocksize = 1, 2092 }, 2093 .setkey = gcm_setkey, 2094 .setauthsize = gcm_setauthsize, 2095 .encrypt = gcm_encrypt, 2096 .decrypt = gcm_decrypt, 2097 .ivsize = GCM_AES_IV_SIZE, 2098 .maxauthsize = AES_BLOCK_SIZE, 2099 }, 2100 .caam = { 2101 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2102 .nodkp = true, 2103 }, 2104 }, 2105 /* single-pass ipsec_esp descriptor */ 2106 { 2107 .aead = { 2108 .base = { 2109 .cra_name = "authenc(hmac(md5)," 2110 "ecb(cipher_null))", 2111 .cra_driver_name = "authenc-hmac-md5-" 2112 "ecb-cipher_null-caam", 2113 .cra_blocksize = NULL_BLOCK_SIZE, 2114 }, 2115 .setkey = aead_setkey, 2116 .setauthsize = aead_setauthsize, 2117 .encrypt = aead_encrypt, 2118 .decrypt = aead_decrypt, 2119 .ivsize = NULL_IV_SIZE, 2120 .maxauthsize = MD5_DIGEST_SIZE, 2121 }, 2122 .caam = { 2123 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2124 OP_ALG_AAI_HMAC_PRECOMP, 2125 }, 2126 }, 2127 { 2128 .aead = { 2129 .base = { 2130 .cra_name = "authenc(hmac(sha1)," 2131 "ecb(cipher_null))", 2132 .cra_driver_name = "authenc-hmac-sha1-" 2133 "ecb-cipher_null-caam", 2134 .cra_blocksize = NULL_BLOCK_SIZE, 2135 }, 2136 .setkey = aead_setkey, 2137 .setauthsize = aead_setauthsize, 2138 .encrypt = aead_encrypt, 2139 .decrypt = aead_decrypt, 2140 .ivsize = NULL_IV_SIZE, 2141 .maxauthsize = SHA1_DIGEST_SIZE, 2142 }, 2143 .caam = { 2144 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2145 OP_ALG_AAI_HMAC_PRECOMP, 2146 }, 2147 }, 2148 { 2149 .aead = { 2150 .base = { 2151 .cra_name = "authenc(hmac(sha224)," 2152 "ecb(cipher_null))", 2153 .cra_driver_name = "authenc-hmac-sha224-" 2154 "ecb-cipher_null-caam", 2155 .cra_blocksize = NULL_BLOCK_SIZE, 2156 }, 2157 .setkey = aead_setkey, 2158 .setauthsize = aead_setauthsize, 2159 .encrypt = aead_encrypt, 2160 .decrypt = aead_decrypt, 2161 .ivsize = NULL_IV_SIZE, 2162 .maxauthsize = SHA224_DIGEST_SIZE, 2163 }, 2164 .caam = { 2165 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2166 OP_ALG_AAI_HMAC_PRECOMP, 2167 }, 2168 }, 2169 { 2170 .aead = { 2171 .base = { 2172 .cra_name = "authenc(hmac(sha256)," 2173 "ecb(cipher_null))", 2174 .cra_driver_name = "authenc-hmac-sha256-" 2175 "ecb-cipher_null-caam", 2176 .cra_blocksize = NULL_BLOCK_SIZE, 2177 }, 2178 .setkey = aead_setkey, 2179 .setauthsize = aead_setauthsize, 2180 .encrypt = aead_encrypt, 2181 .decrypt = aead_decrypt, 2182 .ivsize = NULL_IV_SIZE, 2183 .maxauthsize = SHA256_DIGEST_SIZE, 2184 }, 2185 .caam = { 2186 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2187 OP_ALG_AAI_HMAC_PRECOMP, 2188 }, 2189 }, 2190 { 2191 .aead = { 2192 .base = { 2193 .cra_name = "authenc(hmac(sha384)," 2194 "ecb(cipher_null))", 2195 .cra_driver_name = "authenc-hmac-sha384-" 2196 "ecb-cipher_null-caam", 2197 .cra_blocksize = NULL_BLOCK_SIZE, 2198 }, 2199 .setkey = aead_setkey, 2200 .setauthsize = aead_setauthsize, 2201 .encrypt = aead_encrypt, 2202 .decrypt = aead_decrypt, 2203 .ivsize = NULL_IV_SIZE, 2204 .maxauthsize = SHA384_DIGEST_SIZE, 2205 }, 2206 .caam = { 2207 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2208 OP_ALG_AAI_HMAC_PRECOMP, 2209 }, 2210 }, 2211 { 2212 .aead = { 2213 .base = { 2214 .cra_name = "authenc(hmac(sha512)," 2215 "ecb(cipher_null))", 2216 .cra_driver_name = "authenc-hmac-sha512-" 2217 "ecb-cipher_null-caam", 2218 .cra_blocksize = NULL_BLOCK_SIZE, 2219 }, 2220 .setkey = aead_setkey, 2221 .setauthsize = aead_setauthsize, 2222 .encrypt = aead_encrypt, 2223 .decrypt = aead_decrypt, 2224 .ivsize = NULL_IV_SIZE, 2225 .maxauthsize = SHA512_DIGEST_SIZE, 2226 }, 2227 .caam = { 2228 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2229 OP_ALG_AAI_HMAC_PRECOMP, 2230 }, 2231 }, 2232 { 2233 .aead = { 2234 .base = { 2235 .cra_name = "authenc(hmac(md5),cbc(aes))", 2236 .cra_driver_name = "authenc-hmac-md5-" 2237 "cbc-aes-caam", 2238 .cra_blocksize = AES_BLOCK_SIZE, 2239 }, 2240 .setkey = aead_setkey, 2241 .setauthsize = aead_setauthsize, 2242 .encrypt = aead_encrypt, 2243 .decrypt = aead_decrypt, 2244 .ivsize = AES_BLOCK_SIZE, 2245 .maxauthsize = MD5_DIGEST_SIZE, 2246 }, 2247 .caam = { 2248 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2249 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2250 OP_ALG_AAI_HMAC_PRECOMP, 2251 }, 2252 }, 2253 { 2254 .aead = { 2255 .base = { 2256 .cra_name = "echainiv(authenc(hmac(md5)," 2257 "cbc(aes)))", 2258 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2259 "cbc-aes-caam", 2260 .cra_blocksize = AES_BLOCK_SIZE, 2261 }, 2262 .setkey = aead_setkey, 2263 .setauthsize = aead_setauthsize, 2264 .encrypt = aead_encrypt, 2265 .decrypt = aead_decrypt, 2266 .ivsize = AES_BLOCK_SIZE, 2267 .maxauthsize = MD5_DIGEST_SIZE, 2268 }, 2269 .caam = { 2270 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2271 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2272 OP_ALG_AAI_HMAC_PRECOMP, 2273 .geniv = true, 2274 }, 2275 }, 2276 { 2277 .aead = { 2278 .base = { 2279 .cra_name = "authenc(hmac(sha1),cbc(aes))", 2280 .cra_driver_name = "authenc-hmac-sha1-" 2281 "cbc-aes-caam", 2282 .cra_blocksize = AES_BLOCK_SIZE, 2283 }, 2284 .setkey = aead_setkey, 2285 .setauthsize = aead_setauthsize, 2286 .encrypt = aead_encrypt, 2287 .decrypt = aead_decrypt, 2288 .ivsize = AES_BLOCK_SIZE, 2289 .maxauthsize = SHA1_DIGEST_SIZE, 2290 }, 2291 .caam = { 2292 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2293 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2294 OP_ALG_AAI_HMAC_PRECOMP, 2295 }, 2296 }, 2297 { 2298 .aead = { 2299 .base = { 2300 .cra_name = "echainiv(authenc(hmac(sha1)," 2301 "cbc(aes)))", 2302 .cra_driver_name = "echainiv-authenc-" 2303 "hmac-sha1-cbc-aes-caam", 2304 .cra_blocksize = AES_BLOCK_SIZE, 2305 }, 2306 .setkey = aead_setkey, 2307 .setauthsize = aead_setauthsize, 2308 .encrypt = aead_encrypt, 2309 .decrypt = aead_decrypt, 2310 .ivsize = AES_BLOCK_SIZE, 2311 .maxauthsize = SHA1_DIGEST_SIZE, 2312 }, 2313 .caam = { 2314 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2315 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2316 OP_ALG_AAI_HMAC_PRECOMP, 2317 .geniv = true, 2318 }, 2319 }, 2320 { 2321 .aead = { 2322 .base = { 2323 .cra_name = "authenc(hmac(sha224),cbc(aes))", 2324 .cra_driver_name = "authenc-hmac-sha224-" 2325 "cbc-aes-caam", 2326 .cra_blocksize = AES_BLOCK_SIZE, 2327 }, 2328 .setkey = aead_setkey, 2329 .setauthsize = aead_setauthsize, 2330 .encrypt = aead_encrypt, 2331 .decrypt = aead_decrypt, 2332 .ivsize = AES_BLOCK_SIZE, 2333 .maxauthsize = SHA224_DIGEST_SIZE, 2334 }, 2335 .caam = { 2336 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2337 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2338 OP_ALG_AAI_HMAC_PRECOMP, 2339 }, 2340 }, 2341 { 2342 .aead = { 2343 .base = { 2344 .cra_name = "echainiv(authenc(hmac(sha224)," 2345 "cbc(aes)))", 2346 .cra_driver_name = "echainiv-authenc-" 2347 "hmac-sha224-cbc-aes-caam", 2348 .cra_blocksize = AES_BLOCK_SIZE, 2349 }, 2350 .setkey = aead_setkey, 2351 .setauthsize = aead_setauthsize, 2352 .encrypt = aead_encrypt, 2353 .decrypt = aead_decrypt, 2354 .ivsize = AES_BLOCK_SIZE, 2355 .maxauthsize = SHA224_DIGEST_SIZE, 2356 }, 2357 .caam = { 2358 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2359 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2360 OP_ALG_AAI_HMAC_PRECOMP, 2361 .geniv = true, 2362 }, 2363 }, 2364 { 2365 .aead = { 2366 .base = { 2367 .cra_name = "authenc(hmac(sha256),cbc(aes))", 2368 .cra_driver_name = "authenc-hmac-sha256-" 2369 "cbc-aes-caam", 2370 .cra_blocksize = AES_BLOCK_SIZE, 2371 }, 2372 .setkey = aead_setkey, 2373 .setauthsize = aead_setauthsize, 2374 .encrypt = aead_encrypt, 2375 .decrypt = aead_decrypt, 2376 .ivsize = AES_BLOCK_SIZE, 2377 .maxauthsize = SHA256_DIGEST_SIZE, 2378 }, 2379 .caam = { 2380 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2381 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2382 OP_ALG_AAI_HMAC_PRECOMP, 2383 }, 2384 }, 2385 { 2386 .aead = { 2387 .base = { 2388 .cra_name = "echainiv(authenc(hmac(sha256)," 2389 "cbc(aes)))", 2390 .cra_driver_name = "echainiv-authenc-" 2391 "hmac-sha256-cbc-aes-caam", 2392 .cra_blocksize = AES_BLOCK_SIZE, 2393 }, 2394 .setkey = aead_setkey, 2395 .setauthsize = aead_setauthsize, 2396 .encrypt = aead_encrypt, 2397 .decrypt = aead_decrypt, 2398 .ivsize = AES_BLOCK_SIZE, 2399 .maxauthsize = SHA256_DIGEST_SIZE, 2400 }, 2401 .caam = { 2402 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2403 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2404 OP_ALG_AAI_HMAC_PRECOMP, 2405 .geniv = true, 2406 }, 2407 }, 2408 { 2409 .aead = { 2410 .base = { 2411 .cra_name = "authenc(hmac(sha384),cbc(aes))", 2412 .cra_driver_name = "authenc-hmac-sha384-" 2413 "cbc-aes-caam", 2414 .cra_blocksize = AES_BLOCK_SIZE, 2415 }, 2416 .setkey = aead_setkey, 2417 .setauthsize = aead_setauthsize, 2418 .encrypt = aead_encrypt, 2419 .decrypt = aead_decrypt, 2420 .ivsize = AES_BLOCK_SIZE, 2421 .maxauthsize = SHA384_DIGEST_SIZE, 2422 }, 2423 .caam = { 2424 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2425 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2426 OP_ALG_AAI_HMAC_PRECOMP, 2427 }, 2428 }, 2429 { 2430 .aead = { 2431 .base = { 2432 .cra_name = "echainiv(authenc(hmac(sha384)," 2433 "cbc(aes)))", 2434 .cra_driver_name = "echainiv-authenc-" 2435 "hmac-sha384-cbc-aes-caam", 2436 .cra_blocksize = AES_BLOCK_SIZE, 2437 }, 2438 .setkey = aead_setkey, 2439 .setauthsize = aead_setauthsize, 2440 .encrypt = aead_encrypt, 2441 .decrypt = aead_decrypt, 2442 .ivsize = AES_BLOCK_SIZE, 2443 .maxauthsize = SHA384_DIGEST_SIZE, 2444 }, 2445 .caam = { 2446 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2447 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2448 OP_ALG_AAI_HMAC_PRECOMP, 2449 .geniv = true, 2450 }, 2451 }, 2452 { 2453 .aead = { 2454 .base = { 2455 .cra_name = "authenc(hmac(sha512),cbc(aes))", 2456 .cra_driver_name = "authenc-hmac-sha512-" 2457 "cbc-aes-caam", 2458 .cra_blocksize = AES_BLOCK_SIZE, 2459 }, 2460 .setkey = aead_setkey, 2461 .setauthsize = aead_setauthsize, 2462 .encrypt = aead_encrypt, 2463 .decrypt = aead_decrypt, 2464 .ivsize = AES_BLOCK_SIZE, 2465 .maxauthsize = SHA512_DIGEST_SIZE, 2466 }, 2467 .caam = { 2468 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2469 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2470 OP_ALG_AAI_HMAC_PRECOMP, 2471 }, 2472 }, 2473 { 2474 .aead = { 2475 .base = { 2476 .cra_name = "echainiv(authenc(hmac(sha512)," 2477 "cbc(aes)))", 2478 .cra_driver_name = "echainiv-authenc-" 2479 "hmac-sha512-cbc-aes-caam", 2480 .cra_blocksize = AES_BLOCK_SIZE, 2481 }, 2482 .setkey = aead_setkey, 2483 .setauthsize = aead_setauthsize, 2484 .encrypt = aead_encrypt, 2485 .decrypt = aead_decrypt, 2486 .ivsize = AES_BLOCK_SIZE, 2487 .maxauthsize = SHA512_DIGEST_SIZE, 2488 }, 2489 .caam = { 2490 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2491 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2492 OP_ALG_AAI_HMAC_PRECOMP, 2493 .geniv = true, 2494 }, 2495 }, 2496 { 2497 .aead = { 2498 .base = { 2499 .cra_name = "authenc(hmac(md5),cbc(des3_ede))", 2500 .cra_driver_name = "authenc-hmac-md5-" 2501 "cbc-des3_ede-caam", 2502 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2503 }, 2504 .setkey = des3_aead_setkey, 2505 .setauthsize = aead_setauthsize, 2506 .encrypt = aead_encrypt, 2507 .decrypt = aead_decrypt, 2508 .ivsize = DES3_EDE_BLOCK_SIZE, 2509 .maxauthsize = MD5_DIGEST_SIZE, 2510 }, 2511 .caam = { 2512 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2513 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2514 OP_ALG_AAI_HMAC_PRECOMP, 2515 } 2516 }, 2517 { 2518 .aead = { 2519 .base = { 2520 .cra_name = "echainiv(authenc(hmac(md5)," 2521 "cbc(des3_ede)))", 2522 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2523 "cbc-des3_ede-caam", 2524 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2525 }, 2526 .setkey = des3_aead_setkey, 2527 .setauthsize = aead_setauthsize, 2528 .encrypt = aead_encrypt, 2529 .decrypt = aead_decrypt, 2530 .ivsize = DES3_EDE_BLOCK_SIZE, 2531 .maxauthsize = MD5_DIGEST_SIZE, 2532 }, 2533 .caam = { 2534 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2535 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2536 OP_ALG_AAI_HMAC_PRECOMP, 2537 .geniv = true, 2538 } 2539 }, 2540 { 2541 .aead = { 2542 .base = { 2543 .cra_name = "authenc(hmac(sha1)," 2544 "cbc(des3_ede))", 2545 .cra_driver_name = "authenc-hmac-sha1-" 2546 "cbc-des3_ede-caam", 2547 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2548 }, 2549 .setkey = des3_aead_setkey, 2550 .setauthsize = aead_setauthsize, 2551 .encrypt = aead_encrypt, 2552 .decrypt = aead_decrypt, 2553 .ivsize = DES3_EDE_BLOCK_SIZE, 2554 .maxauthsize = SHA1_DIGEST_SIZE, 2555 }, 2556 .caam = { 2557 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2558 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2559 OP_ALG_AAI_HMAC_PRECOMP, 2560 }, 2561 }, 2562 { 2563 .aead = { 2564 .base = { 2565 .cra_name = "echainiv(authenc(hmac(sha1)," 2566 "cbc(des3_ede)))", 2567 .cra_driver_name = "echainiv-authenc-" 2568 "hmac-sha1-" 2569 "cbc-des3_ede-caam", 2570 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2571 }, 2572 .setkey = des3_aead_setkey, 2573 .setauthsize = aead_setauthsize, 2574 .encrypt = aead_encrypt, 2575 .decrypt = aead_decrypt, 2576 .ivsize = DES3_EDE_BLOCK_SIZE, 2577 .maxauthsize = SHA1_DIGEST_SIZE, 2578 }, 2579 .caam = { 2580 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2581 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2582 OP_ALG_AAI_HMAC_PRECOMP, 2583 .geniv = true, 2584 }, 2585 }, 2586 { 2587 .aead = { 2588 .base = { 2589 .cra_name = "authenc(hmac(sha224)," 2590 "cbc(des3_ede))", 2591 .cra_driver_name = "authenc-hmac-sha224-" 2592 "cbc-des3_ede-caam", 2593 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2594 }, 2595 .setkey = des3_aead_setkey, 2596 .setauthsize = aead_setauthsize, 2597 .encrypt = aead_encrypt, 2598 .decrypt = aead_decrypt, 2599 .ivsize = DES3_EDE_BLOCK_SIZE, 2600 .maxauthsize = SHA224_DIGEST_SIZE, 2601 }, 2602 .caam = { 2603 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2604 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2605 OP_ALG_AAI_HMAC_PRECOMP, 2606 }, 2607 }, 2608 { 2609 .aead = { 2610 .base = { 2611 .cra_name = "echainiv(authenc(hmac(sha224)," 2612 "cbc(des3_ede)))", 2613 .cra_driver_name = "echainiv-authenc-" 2614 "hmac-sha224-" 2615 "cbc-des3_ede-caam", 2616 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2617 }, 2618 .setkey = des3_aead_setkey, 2619 .setauthsize = aead_setauthsize, 2620 .encrypt = aead_encrypt, 2621 .decrypt = aead_decrypt, 2622 .ivsize = DES3_EDE_BLOCK_SIZE, 2623 .maxauthsize = SHA224_DIGEST_SIZE, 2624 }, 2625 .caam = { 2626 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2627 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2628 OP_ALG_AAI_HMAC_PRECOMP, 2629 .geniv = true, 2630 }, 2631 }, 2632 { 2633 .aead = { 2634 .base = { 2635 .cra_name = "authenc(hmac(sha256)," 2636 "cbc(des3_ede))", 2637 .cra_driver_name = "authenc-hmac-sha256-" 2638 "cbc-des3_ede-caam", 2639 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2640 }, 2641 .setkey = des3_aead_setkey, 2642 .setauthsize = aead_setauthsize, 2643 .encrypt = aead_encrypt, 2644 .decrypt = aead_decrypt, 2645 .ivsize = DES3_EDE_BLOCK_SIZE, 2646 .maxauthsize = SHA256_DIGEST_SIZE, 2647 }, 2648 .caam = { 2649 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2650 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2651 OP_ALG_AAI_HMAC_PRECOMP, 2652 }, 2653 }, 2654 { 2655 .aead = { 2656 .base = { 2657 .cra_name = "echainiv(authenc(hmac(sha256)," 2658 "cbc(des3_ede)))", 2659 .cra_driver_name = "echainiv-authenc-" 2660 "hmac-sha256-" 2661 "cbc-des3_ede-caam", 2662 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2663 }, 2664 .setkey = des3_aead_setkey, 2665 .setauthsize = aead_setauthsize, 2666 .encrypt = aead_encrypt, 2667 .decrypt = aead_decrypt, 2668 .ivsize = DES3_EDE_BLOCK_SIZE, 2669 .maxauthsize = SHA256_DIGEST_SIZE, 2670 }, 2671 .caam = { 2672 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2673 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2674 OP_ALG_AAI_HMAC_PRECOMP, 2675 .geniv = true, 2676 }, 2677 }, 2678 { 2679 .aead = { 2680 .base = { 2681 .cra_name = "authenc(hmac(sha384)," 2682 "cbc(des3_ede))", 2683 .cra_driver_name = "authenc-hmac-sha384-" 2684 "cbc-des3_ede-caam", 2685 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2686 }, 2687 .setkey = des3_aead_setkey, 2688 .setauthsize = aead_setauthsize, 2689 .encrypt = aead_encrypt, 2690 .decrypt = aead_decrypt, 2691 .ivsize = DES3_EDE_BLOCK_SIZE, 2692 .maxauthsize = SHA384_DIGEST_SIZE, 2693 }, 2694 .caam = { 2695 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2696 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2697 OP_ALG_AAI_HMAC_PRECOMP, 2698 }, 2699 }, 2700 { 2701 .aead = { 2702 .base = { 2703 .cra_name = "echainiv(authenc(hmac(sha384)," 2704 "cbc(des3_ede)))", 2705 .cra_driver_name = "echainiv-authenc-" 2706 "hmac-sha384-" 2707 "cbc-des3_ede-caam", 2708 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2709 }, 2710 .setkey = des3_aead_setkey, 2711 .setauthsize = aead_setauthsize, 2712 .encrypt = aead_encrypt, 2713 .decrypt = aead_decrypt, 2714 .ivsize = DES3_EDE_BLOCK_SIZE, 2715 .maxauthsize = SHA384_DIGEST_SIZE, 2716 }, 2717 .caam = { 2718 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2719 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2720 OP_ALG_AAI_HMAC_PRECOMP, 2721 .geniv = true, 2722 }, 2723 }, 2724 { 2725 .aead = { 2726 .base = { 2727 .cra_name = "authenc(hmac(sha512)," 2728 "cbc(des3_ede))", 2729 .cra_driver_name = "authenc-hmac-sha512-" 2730 "cbc-des3_ede-caam", 2731 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2732 }, 2733 .setkey = des3_aead_setkey, 2734 .setauthsize = aead_setauthsize, 2735 .encrypt = aead_encrypt, 2736 .decrypt = aead_decrypt, 2737 .ivsize = DES3_EDE_BLOCK_SIZE, 2738 .maxauthsize = SHA512_DIGEST_SIZE, 2739 }, 2740 .caam = { 2741 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2742 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2743 OP_ALG_AAI_HMAC_PRECOMP, 2744 }, 2745 }, 2746 { 2747 .aead = { 2748 .base = { 2749 .cra_name = "echainiv(authenc(hmac(sha512)," 2750 "cbc(des3_ede)))", 2751 .cra_driver_name = "echainiv-authenc-" 2752 "hmac-sha512-" 2753 "cbc-des3_ede-caam", 2754 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2755 }, 2756 .setkey = des3_aead_setkey, 2757 .setauthsize = aead_setauthsize, 2758 .encrypt = aead_encrypt, 2759 .decrypt = aead_decrypt, 2760 .ivsize = DES3_EDE_BLOCK_SIZE, 2761 .maxauthsize = SHA512_DIGEST_SIZE, 2762 }, 2763 .caam = { 2764 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2765 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2766 OP_ALG_AAI_HMAC_PRECOMP, 2767 .geniv = true, 2768 }, 2769 }, 2770 { 2771 .aead = { 2772 .base = { 2773 .cra_name = "authenc(hmac(md5),cbc(des))", 2774 .cra_driver_name = "authenc-hmac-md5-" 2775 "cbc-des-caam", 2776 .cra_blocksize = DES_BLOCK_SIZE, 2777 }, 2778 .setkey = aead_setkey, 2779 .setauthsize = aead_setauthsize, 2780 .encrypt = aead_encrypt, 2781 .decrypt = aead_decrypt, 2782 .ivsize = DES_BLOCK_SIZE, 2783 .maxauthsize = MD5_DIGEST_SIZE, 2784 }, 2785 .caam = { 2786 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2787 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2788 OP_ALG_AAI_HMAC_PRECOMP, 2789 }, 2790 }, 2791 { 2792 .aead = { 2793 .base = { 2794 .cra_name = "echainiv(authenc(hmac(md5)," 2795 "cbc(des)))", 2796 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2797 "cbc-des-caam", 2798 .cra_blocksize = DES_BLOCK_SIZE, 2799 }, 2800 .setkey = aead_setkey, 2801 .setauthsize = aead_setauthsize, 2802 .encrypt = aead_encrypt, 2803 .decrypt = aead_decrypt, 2804 .ivsize = DES_BLOCK_SIZE, 2805 .maxauthsize = MD5_DIGEST_SIZE, 2806 }, 2807 .caam = { 2808 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2809 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2810 OP_ALG_AAI_HMAC_PRECOMP, 2811 .geniv = true, 2812 }, 2813 }, 2814 { 2815 .aead = { 2816 .base = { 2817 .cra_name = "authenc(hmac(sha1),cbc(des))", 2818 .cra_driver_name = "authenc-hmac-sha1-" 2819 "cbc-des-caam", 2820 .cra_blocksize = DES_BLOCK_SIZE, 2821 }, 2822 .setkey = aead_setkey, 2823 .setauthsize = aead_setauthsize, 2824 .encrypt = aead_encrypt, 2825 .decrypt = aead_decrypt, 2826 .ivsize = DES_BLOCK_SIZE, 2827 .maxauthsize = SHA1_DIGEST_SIZE, 2828 }, 2829 .caam = { 2830 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2831 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2832 OP_ALG_AAI_HMAC_PRECOMP, 2833 }, 2834 }, 2835 { 2836 .aead = { 2837 .base = { 2838 .cra_name = "echainiv(authenc(hmac(sha1)," 2839 "cbc(des)))", 2840 .cra_driver_name = "echainiv-authenc-" 2841 "hmac-sha1-cbc-des-caam", 2842 .cra_blocksize = DES_BLOCK_SIZE, 2843 }, 2844 .setkey = aead_setkey, 2845 .setauthsize = aead_setauthsize, 2846 .encrypt = aead_encrypt, 2847 .decrypt = aead_decrypt, 2848 .ivsize = DES_BLOCK_SIZE, 2849 .maxauthsize = SHA1_DIGEST_SIZE, 2850 }, 2851 .caam = { 2852 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2853 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2854 OP_ALG_AAI_HMAC_PRECOMP, 2855 .geniv = true, 2856 }, 2857 }, 2858 { 2859 .aead = { 2860 .base = { 2861 .cra_name = "authenc(hmac(sha224),cbc(des))", 2862 .cra_driver_name = "authenc-hmac-sha224-" 2863 "cbc-des-caam", 2864 .cra_blocksize = DES_BLOCK_SIZE, 2865 }, 2866 .setkey = aead_setkey, 2867 .setauthsize = aead_setauthsize, 2868 .encrypt = aead_encrypt, 2869 .decrypt = aead_decrypt, 2870 .ivsize = DES_BLOCK_SIZE, 2871 .maxauthsize = SHA224_DIGEST_SIZE, 2872 }, 2873 .caam = { 2874 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2875 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2876 OP_ALG_AAI_HMAC_PRECOMP, 2877 }, 2878 }, 2879 { 2880 .aead = { 2881 .base = { 2882 .cra_name = "echainiv(authenc(hmac(sha224)," 2883 "cbc(des)))", 2884 .cra_driver_name = "echainiv-authenc-" 2885 "hmac-sha224-cbc-des-caam", 2886 .cra_blocksize = DES_BLOCK_SIZE, 2887 }, 2888 .setkey = aead_setkey, 2889 .setauthsize = aead_setauthsize, 2890 .encrypt = aead_encrypt, 2891 .decrypt = aead_decrypt, 2892 .ivsize = DES_BLOCK_SIZE, 2893 .maxauthsize = SHA224_DIGEST_SIZE, 2894 }, 2895 .caam = { 2896 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2897 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2898 OP_ALG_AAI_HMAC_PRECOMP, 2899 .geniv = true, 2900 }, 2901 }, 2902 { 2903 .aead = { 2904 .base = { 2905 .cra_name = "authenc(hmac(sha256),cbc(des))", 2906 .cra_driver_name = "authenc-hmac-sha256-" 2907 "cbc-des-caam", 2908 .cra_blocksize = DES_BLOCK_SIZE, 2909 }, 2910 .setkey = aead_setkey, 2911 .setauthsize = aead_setauthsize, 2912 .encrypt = aead_encrypt, 2913 .decrypt = aead_decrypt, 2914 .ivsize = DES_BLOCK_SIZE, 2915 .maxauthsize = SHA256_DIGEST_SIZE, 2916 }, 2917 .caam = { 2918 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2919 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2920 OP_ALG_AAI_HMAC_PRECOMP, 2921 }, 2922 }, 2923 { 2924 .aead = { 2925 .base = { 2926 .cra_name = "echainiv(authenc(hmac(sha256)," 2927 "cbc(des)))", 2928 .cra_driver_name = "echainiv-authenc-" 2929 "hmac-sha256-cbc-des-caam", 2930 .cra_blocksize = DES_BLOCK_SIZE, 2931 }, 2932 .setkey = aead_setkey, 2933 .setauthsize = aead_setauthsize, 2934 .encrypt = aead_encrypt, 2935 .decrypt = aead_decrypt, 2936 .ivsize = DES_BLOCK_SIZE, 2937 .maxauthsize = SHA256_DIGEST_SIZE, 2938 }, 2939 .caam = { 2940 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2941 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2942 OP_ALG_AAI_HMAC_PRECOMP, 2943 .geniv = true, 2944 }, 2945 }, 2946 { 2947 .aead = { 2948 .base = { 2949 .cra_name = "authenc(hmac(sha384),cbc(des))", 2950 .cra_driver_name = "authenc-hmac-sha384-" 2951 "cbc-des-caam", 2952 .cra_blocksize = DES_BLOCK_SIZE, 2953 }, 2954 .setkey = aead_setkey, 2955 .setauthsize = aead_setauthsize, 2956 .encrypt = aead_encrypt, 2957 .decrypt = aead_decrypt, 2958 .ivsize = DES_BLOCK_SIZE, 2959 .maxauthsize = SHA384_DIGEST_SIZE, 2960 }, 2961 .caam = { 2962 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2963 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2964 OP_ALG_AAI_HMAC_PRECOMP, 2965 }, 2966 }, 2967 { 2968 .aead = { 2969 .base = { 2970 .cra_name = "echainiv(authenc(hmac(sha384)," 2971 "cbc(des)))", 2972 .cra_driver_name = "echainiv-authenc-" 2973 "hmac-sha384-cbc-des-caam", 2974 .cra_blocksize = DES_BLOCK_SIZE, 2975 }, 2976 .setkey = aead_setkey, 2977 .setauthsize = aead_setauthsize, 2978 .encrypt = aead_encrypt, 2979 .decrypt = aead_decrypt, 2980 .ivsize = DES_BLOCK_SIZE, 2981 .maxauthsize = SHA384_DIGEST_SIZE, 2982 }, 2983 .caam = { 2984 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2985 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2986 OP_ALG_AAI_HMAC_PRECOMP, 2987 .geniv = true, 2988 }, 2989 }, 2990 { 2991 .aead = { 2992 .base = { 2993 .cra_name = "authenc(hmac(sha512),cbc(des))", 2994 .cra_driver_name = "authenc-hmac-sha512-" 2995 "cbc-des-caam", 2996 .cra_blocksize = DES_BLOCK_SIZE, 2997 }, 2998 .setkey = aead_setkey, 2999 .setauthsize = aead_setauthsize, 3000 .encrypt = aead_encrypt, 3001 .decrypt = aead_decrypt, 3002 .ivsize = DES_BLOCK_SIZE, 3003 .maxauthsize = SHA512_DIGEST_SIZE, 3004 }, 3005 .caam = { 3006 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 3007 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3008 OP_ALG_AAI_HMAC_PRECOMP, 3009 }, 3010 }, 3011 { 3012 .aead = { 3013 .base = { 3014 .cra_name = "echainiv(authenc(hmac(sha512)," 3015 "cbc(des)))", 3016 .cra_driver_name = "echainiv-authenc-" 3017 "hmac-sha512-cbc-des-caam", 3018 .cra_blocksize = DES_BLOCK_SIZE, 3019 }, 3020 .setkey = aead_setkey, 3021 .setauthsize = aead_setauthsize, 3022 .encrypt = aead_encrypt, 3023 .decrypt = aead_decrypt, 3024 .ivsize = DES_BLOCK_SIZE, 3025 .maxauthsize = SHA512_DIGEST_SIZE, 3026 }, 3027 .caam = { 3028 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 3029 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3030 OP_ALG_AAI_HMAC_PRECOMP, 3031 .geniv = true, 3032 }, 3033 }, 3034 { 3035 .aead = { 3036 .base = { 3037 .cra_name = "authenc(hmac(md5)," 3038 "rfc3686(ctr(aes)))", 3039 .cra_driver_name = "authenc-hmac-md5-" 3040 "rfc3686-ctr-aes-caam", 3041 .cra_blocksize = 1, 3042 }, 3043 .setkey = aead_setkey, 3044 .setauthsize = aead_setauthsize, 3045 .encrypt = aead_encrypt, 3046 .decrypt = aead_decrypt, 3047 .ivsize = CTR_RFC3686_IV_SIZE, 3048 .maxauthsize = MD5_DIGEST_SIZE, 3049 }, 3050 .caam = { 3051 .class1_alg_type = OP_ALG_ALGSEL_AES | 3052 OP_ALG_AAI_CTR_MOD128, 3053 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3054 OP_ALG_AAI_HMAC_PRECOMP, 3055 .rfc3686 = true, 3056 }, 3057 }, 3058 { 3059 .aead = { 3060 .base = { 3061 .cra_name = "seqiv(authenc(" 3062 "hmac(md5),rfc3686(ctr(aes))))", 3063 .cra_driver_name = "seqiv-authenc-hmac-md5-" 3064 "rfc3686-ctr-aes-caam", 3065 .cra_blocksize = 1, 3066 }, 3067 .setkey = aead_setkey, 3068 .setauthsize = aead_setauthsize, 3069 .encrypt = aead_encrypt, 3070 .decrypt = aead_decrypt, 3071 .ivsize = CTR_RFC3686_IV_SIZE, 3072 .maxauthsize = MD5_DIGEST_SIZE, 3073 }, 3074 .caam = { 3075 .class1_alg_type = OP_ALG_ALGSEL_AES | 3076 OP_ALG_AAI_CTR_MOD128, 3077 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3078 OP_ALG_AAI_HMAC_PRECOMP, 3079 .rfc3686 = true, 3080 .geniv = true, 3081 }, 3082 }, 3083 { 3084 .aead = { 3085 .base = { 3086 .cra_name = "authenc(hmac(sha1)," 3087 "rfc3686(ctr(aes)))", 3088 .cra_driver_name = "authenc-hmac-sha1-" 3089 "rfc3686-ctr-aes-caam", 3090 .cra_blocksize = 1, 3091 }, 3092 .setkey = aead_setkey, 3093 .setauthsize = aead_setauthsize, 3094 .encrypt = aead_encrypt, 3095 .decrypt = aead_decrypt, 3096 .ivsize = CTR_RFC3686_IV_SIZE, 3097 .maxauthsize = SHA1_DIGEST_SIZE, 3098 }, 3099 .caam = { 3100 .class1_alg_type = OP_ALG_ALGSEL_AES | 3101 OP_ALG_AAI_CTR_MOD128, 3102 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3103 OP_ALG_AAI_HMAC_PRECOMP, 3104 .rfc3686 = true, 3105 }, 3106 }, 3107 { 3108 .aead = { 3109 .base = { 3110 .cra_name = "seqiv(authenc(" 3111 "hmac(sha1),rfc3686(ctr(aes))))", 3112 .cra_driver_name = "seqiv-authenc-hmac-sha1-" 3113 "rfc3686-ctr-aes-caam", 3114 .cra_blocksize = 1, 3115 }, 3116 .setkey = aead_setkey, 3117 .setauthsize = aead_setauthsize, 3118 .encrypt = aead_encrypt, 3119 .decrypt = aead_decrypt, 3120 .ivsize = CTR_RFC3686_IV_SIZE, 3121 .maxauthsize = SHA1_DIGEST_SIZE, 3122 }, 3123 .caam = { 3124 .class1_alg_type = OP_ALG_ALGSEL_AES | 3125 OP_ALG_AAI_CTR_MOD128, 3126 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3127 OP_ALG_AAI_HMAC_PRECOMP, 3128 .rfc3686 = true, 3129 .geniv = true, 3130 }, 3131 }, 3132 { 3133 .aead = { 3134 .base = { 3135 .cra_name = "authenc(hmac(sha224)," 3136 "rfc3686(ctr(aes)))", 3137 .cra_driver_name = "authenc-hmac-sha224-" 3138 "rfc3686-ctr-aes-caam", 3139 .cra_blocksize = 1, 3140 }, 3141 .setkey = aead_setkey, 3142 .setauthsize = aead_setauthsize, 3143 .encrypt = aead_encrypt, 3144 .decrypt = aead_decrypt, 3145 .ivsize = CTR_RFC3686_IV_SIZE, 3146 .maxauthsize = SHA224_DIGEST_SIZE, 3147 }, 3148 .caam = { 3149 .class1_alg_type = OP_ALG_ALGSEL_AES | 3150 OP_ALG_AAI_CTR_MOD128, 3151 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3152 OP_ALG_AAI_HMAC_PRECOMP, 3153 .rfc3686 = true, 3154 }, 3155 }, 3156 { 3157 .aead = { 3158 .base = { 3159 .cra_name = "seqiv(authenc(" 3160 "hmac(sha224),rfc3686(ctr(aes))))", 3161 .cra_driver_name = "seqiv-authenc-hmac-sha224-" 3162 "rfc3686-ctr-aes-caam", 3163 .cra_blocksize = 1, 3164 }, 3165 .setkey = aead_setkey, 3166 .setauthsize = aead_setauthsize, 3167 .encrypt = aead_encrypt, 3168 .decrypt = aead_decrypt, 3169 .ivsize = CTR_RFC3686_IV_SIZE, 3170 .maxauthsize = SHA224_DIGEST_SIZE, 3171 }, 3172 .caam = { 3173 .class1_alg_type = OP_ALG_ALGSEL_AES | 3174 OP_ALG_AAI_CTR_MOD128, 3175 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3176 OP_ALG_AAI_HMAC_PRECOMP, 3177 .rfc3686 = true, 3178 .geniv = true, 3179 }, 3180 }, 3181 { 3182 .aead = { 3183 .base = { 3184 .cra_name = "authenc(hmac(sha256)," 3185 "rfc3686(ctr(aes)))", 3186 .cra_driver_name = "authenc-hmac-sha256-" 3187 "rfc3686-ctr-aes-caam", 3188 .cra_blocksize = 1, 3189 }, 3190 .setkey = aead_setkey, 3191 .setauthsize = aead_setauthsize, 3192 .encrypt = aead_encrypt, 3193 .decrypt = aead_decrypt, 3194 .ivsize = CTR_RFC3686_IV_SIZE, 3195 .maxauthsize = SHA256_DIGEST_SIZE, 3196 }, 3197 .caam = { 3198 .class1_alg_type = OP_ALG_ALGSEL_AES | 3199 OP_ALG_AAI_CTR_MOD128, 3200 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3201 OP_ALG_AAI_HMAC_PRECOMP, 3202 .rfc3686 = true, 3203 }, 3204 }, 3205 { 3206 .aead = { 3207 .base = { 3208 .cra_name = "seqiv(authenc(hmac(sha256)," 3209 "rfc3686(ctr(aes))))", 3210 .cra_driver_name = "seqiv-authenc-hmac-sha256-" 3211 "rfc3686-ctr-aes-caam", 3212 .cra_blocksize = 1, 3213 }, 3214 .setkey = aead_setkey, 3215 .setauthsize = aead_setauthsize, 3216 .encrypt = aead_encrypt, 3217 .decrypt = aead_decrypt, 3218 .ivsize = CTR_RFC3686_IV_SIZE, 3219 .maxauthsize = SHA256_DIGEST_SIZE, 3220 }, 3221 .caam = { 3222 .class1_alg_type = OP_ALG_ALGSEL_AES | 3223 OP_ALG_AAI_CTR_MOD128, 3224 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3225 OP_ALG_AAI_HMAC_PRECOMP, 3226 .rfc3686 = true, 3227 .geniv = true, 3228 }, 3229 }, 3230 { 3231 .aead = { 3232 .base = { 3233 .cra_name = "authenc(hmac(sha384)," 3234 "rfc3686(ctr(aes)))", 3235 .cra_driver_name = "authenc-hmac-sha384-" 3236 "rfc3686-ctr-aes-caam", 3237 .cra_blocksize = 1, 3238 }, 3239 .setkey = aead_setkey, 3240 .setauthsize = aead_setauthsize, 3241 .encrypt = aead_encrypt, 3242 .decrypt = aead_decrypt, 3243 .ivsize = CTR_RFC3686_IV_SIZE, 3244 .maxauthsize = SHA384_DIGEST_SIZE, 3245 }, 3246 .caam = { 3247 .class1_alg_type = OP_ALG_ALGSEL_AES | 3248 OP_ALG_AAI_CTR_MOD128, 3249 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3250 OP_ALG_AAI_HMAC_PRECOMP, 3251 .rfc3686 = true, 3252 }, 3253 }, 3254 { 3255 .aead = { 3256 .base = { 3257 .cra_name = "seqiv(authenc(hmac(sha384)," 3258 "rfc3686(ctr(aes))))", 3259 .cra_driver_name = "seqiv-authenc-hmac-sha384-" 3260 "rfc3686-ctr-aes-caam", 3261 .cra_blocksize = 1, 3262 }, 3263 .setkey = aead_setkey, 3264 .setauthsize = aead_setauthsize, 3265 .encrypt = aead_encrypt, 3266 .decrypt = aead_decrypt, 3267 .ivsize = CTR_RFC3686_IV_SIZE, 3268 .maxauthsize = SHA384_DIGEST_SIZE, 3269 }, 3270 .caam = { 3271 .class1_alg_type = OP_ALG_ALGSEL_AES | 3272 OP_ALG_AAI_CTR_MOD128, 3273 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3274 OP_ALG_AAI_HMAC_PRECOMP, 3275 .rfc3686 = true, 3276 .geniv = true, 3277 }, 3278 }, 3279 { 3280 .aead = { 3281 .base = { 3282 .cra_name = "authenc(hmac(sha512)," 3283 "rfc3686(ctr(aes)))", 3284 .cra_driver_name = "authenc-hmac-sha512-" 3285 "rfc3686-ctr-aes-caam", 3286 .cra_blocksize = 1, 3287 }, 3288 .setkey = aead_setkey, 3289 .setauthsize = aead_setauthsize, 3290 .encrypt = aead_encrypt, 3291 .decrypt = aead_decrypt, 3292 .ivsize = CTR_RFC3686_IV_SIZE, 3293 .maxauthsize = SHA512_DIGEST_SIZE, 3294 }, 3295 .caam = { 3296 .class1_alg_type = OP_ALG_ALGSEL_AES | 3297 OP_ALG_AAI_CTR_MOD128, 3298 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3299 OP_ALG_AAI_HMAC_PRECOMP, 3300 .rfc3686 = true, 3301 }, 3302 }, 3303 { 3304 .aead = { 3305 .base = { 3306 .cra_name = "seqiv(authenc(hmac(sha512)," 3307 "rfc3686(ctr(aes))))", 3308 .cra_driver_name = "seqiv-authenc-hmac-sha512-" 3309 "rfc3686-ctr-aes-caam", 3310 .cra_blocksize = 1, 3311 }, 3312 .setkey = aead_setkey, 3313 .setauthsize = aead_setauthsize, 3314 .encrypt = aead_encrypt, 3315 .decrypt = aead_decrypt, 3316 .ivsize = CTR_RFC3686_IV_SIZE, 3317 .maxauthsize = SHA512_DIGEST_SIZE, 3318 }, 3319 .caam = { 3320 .class1_alg_type = OP_ALG_ALGSEL_AES | 3321 OP_ALG_AAI_CTR_MOD128, 3322 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3323 OP_ALG_AAI_HMAC_PRECOMP, 3324 .rfc3686 = true, 3325 .geniv = true, 3326 }, 3327 }, 3328 { 3329 .aead = { 3330 .base = { 3331 .cra_name = "rfc7539(chacha20,poly1305)", 3332 .cra_driver_name = "rfc7539-chacha20-poly1305-" 3333 "caam", 3334 .cra_blocksize = 1, 3335 }, 3336 .setkey = chachapoly_setkey, 3337 .setauthsize = chachapoly_setauthsize, 3338 .encrypt = chachapoly_encrypt, 3339 .decrypt = chachapoly_decrypt, 3340 .ivsize = CHACHAPOLY_IV_SIZE, 3341 .maxauthsize = POLY1305_DIGEST_SIZE, 3342 }, 3343 .caam = { 3344 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 | 3345 OP_ALG_AAI_AEAD, 3346 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 | 3347 OP_ALG_AAI_AEAD, 3348 .nodkp = true, 3349 }, 3350 }, 3351 { 3352 .aead = { 3353 .base = { 3354 .cra_name = "rfc7539esp(chacha20,poly1305)", 3355 .cra_driver_name = "rfc7539esp-chacha20-" 3356 "poly1305-caam", 3357 .cra_blocksize = 1, 3358 }, 3359 .setkey = chachapoly_setkey, 3360 .setauthsize = chachapoly_setauthsize, 3361 .encrypt = chachapoly_encrypt, 3362 .decrypt = chachapoly_decrypt, 3363 .ivsize = 8, 3364 .maxauthsize = POLY1305_DIGEST_SIZE, 3365 }, 3366 .caam = { 3367 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 | 3368 OP_ALG_AAI_AEAD, 3369 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 | 3370 OP_ALG_AAI_AEAD, 3371 .nodkp = true, 3372 }, 3373 }, 3374 }; 3375 3376 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam, 3377 bool uses_dkp) 3378 { 3379 dma_addr_t dma_addr; 3380 struct caam_drv_private *priv; 3381 3382 ctx->jrdev = caam_jr_alloc(); 3383 if (IS_ERR(ctx->jrdev)) { 3384 pr_err("Job Ring Device allocation for transform failed\n"); 3385 return PTR_ERR(ctx->jrdev); 3386 } 3387 3388 priv = dev_get_drvdata(ctx->jrdev->parent); 3389 if (priv->era >= 6 && uses_dkp) 3390 ctx->dir = DMA_BIDIRECTIONAL; 3391 else 3392 ctx->dir = DMA_TO_DEVICE; 3393 3394 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, 3395 offsetof(struct caam_ctx, 3396 sh_desc_enc_dma), 3397 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3398 if (dma_mapping_error(ctx->jrdev, dma_addr)) { 3399 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); 3400 caam_jr_free(ctx->jrdev); 3401 return -ENOMEM; 3402 } 3403 3404 ctx->sh_desc_enc_dma = dma_addr; 3405 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx, 3406 sh_desc_dec); 3407 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key); 3408 3409 /* copy descriptor header template value */ 3410 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type; 3411 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type; 3412 3413 return 0; 3414 } 3415 3416 static int caam_cra_init(struct crypto_skcipher *tfm) 3417 { 3418 struct skcipher_alg *alg = crypto_skcipher_alg(tfm); 3419 struct caam_skcipher_alg *caam_alg = 3420 container_of(alg, typeof(*caam_alg), skcipher); 3421 3422 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam, 3423 false); 3424 } 3425 3426 static int caam_aead_init(struct crypto_aead *tfm) 3427 { 3428 struct aead_alg *alg = crypto_aead_alg(tfm); 3429 struct caam_aead_alg *caam_alg = 3430 container_of(alg, struct caam_aead_alg, aead); 3431 struct caam_ctx *ctx = crypto_aead_ctx(tfm); 3432 3433 return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp); 3434 } 3435 3436 static void caam_exit_common(struct caam_ctx *ctx) 3437 { 3438 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, 3439 offsetof(struct caam_ctx, sh_desc_enc_dma), 3440 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3441 caam_jr_free(ctx->jrdev); 3442 } 3443 3444 static void caam_cra_exit(struct crypto_skcipher *tfm) 3445 { 3446 caam_exit_common(crypto_skcipher_ctx(tfm)); 3447 } 3448 3449 static void caam_aead_exit(struct crypto_aead *tfm) 3450 { 3451 caam_exit_common(crypto_aead_ctx(tfm)); 3452 } 3453 3454 void caam_algapi_exit(void) 3455 { 3456 int i; 3457 3458 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3459 struct caam_aead_alg *t_alg = driver_aeads + i; 3460 3461 if (t_alg->registered) 3462 crypto_unregister_aead(&t_alg->aead); 3463 } 3464 3465 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3466 struct caam_skcipher_alg *t_alg = driver_algs + i; 3467 3468 if (t_alg->registered) 3469 crypto_unregister_skcipher(&t_alg->skcipher); 3470 } 3471 } 3472 3473 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg) 3474 { 3475 struct skcipher_alg *alg = &t_alg->skcipher; 3476 3477 alg->base.cra_module = THIS_MODULE; 3478 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3479 alg->base.cra_ctxsize = sizeof(struct caam_ctx); 3480 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 3481 3482 alg->init = caam_cra_init; 3483 alg->exit = caam_cra_exit; 3484 } 3485 3486 static void caam_aead_alg_init(struct caam_aead_alg *t_alg) 3487 { 3488 struct aead_alg *alg = &t_alg->aead; 3489 3490 alg->base.cra_module = THIS_MODULE; 3491 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3492 alg->base.cra_ctxsize = sizeof(struct caam_ctx); 3493 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 3494 3495 alg->init = caam_aead_init; 3496 alg->exit = caam_aead_exit; 3497 } 3498 3499 int caam_algapi_init(struct device *ctrldev) 3500 { 3501 struct caam_drv_private *priv = dev_get_drvdata(ctrldev); 3502 int i = 0, err = 0; 3503 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst; 3504 u32 arc4_inst; 3505 unsigned int md_limit = SHA512_DIGEST_SIZE; 3506 bool registered = false, gcm_support; 3507 3508 /* 3509 * Register crypto algorithms the device supports. 3510 * First, detect presence and attributes of DES, AES, and MD blocks. 3511 */ 3512 if (priv->era < 10) { 3513 u32 cha_vid, cha_inst, aes_rn; 3514 3515 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls); 3516 aes_vid = cha_vid & CHA_ID_LS_AES_MASK; 3517 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3518 3519 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls); 3520 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> 3521 CHA_ID_LS_DES_SHIFT; 3522 aes_inst = cha_inst & CHA_ID_LS_AES_MASK; 3523 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3524 arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >> 3525 CHA_ID_LS_ARC4_SHIFT; 3526 ccha_inst = 0; 3527 ptha_inst = 0; 3528 3529 aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) & 3530 CHA_ID_LS_AES_MASK; 3531 gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8); 3532 } else { 3533 u32 aesa, mdha; 3534 3535 aesa = rd_reg32(&priv->ctrl->vreg.aesa); 3536 mdha = rd_reg32(&priv->ctrl->vreg.mdha); 3537 3538 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT; 3539 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT; 3540 3541 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK; 3542 aes_inst = aesa & CHA_VER_NUM_MASK; 3543 md_inst = mdha & CHA_VER_NUM_MASK; 3544 ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK; 3545 ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK; 3546 arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK; 3547 3548 gcm_support = aesa & CHA_VER_MISC_AES_GCM; 3549 } 3550 3551 /* If MD is present, limit digest size based on LP256 */ 3552 if (md_inst && md_vid == CHA_VER_VID_MD_LP256) 3553 md_limit = SHA256_DIGEST_SIZE; 3554 3555 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3556 struct caam_skcipher_alg *t_alg = driver_algs + i; 3557 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK; 3558 3559 /* Skip DES algorithms if not supported by device */ 3560 if (!des_inst && 3561 ((alg_sel == OP_ALG_ALGSEL_3DES) || 3562 (alg_sel == OP_ALG_ALGSEL_DES))) 3563 continue; 3564 3565 /* Skip AES algorithms if not supported by device */ 3566 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES)) 3567 continue; 3568 3569 /* Skip ARC4 algorithms if not supported by device */ 3570 if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4) 3571 continue; 3572 3573 /* 3574 * Check support for AES modes not available 3575 * on LP devices. 3576 */ 3577 if (aes_vid == CHA_VER_VID_AES_LP && 3578 (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) == 3579 OP_ALG_AAI_XTS) 3580 continue; 3581 3582 caam_skcipher_alg_init(t_alg); 3583 3584 err = crypto_register_skcipher(&t_alg->skcipher); 3585 if (err) { 3586 pr_warn("%s alg registration failed\n", 3587 t_alg->skcipher.base.cra_driver_name); 3588 continue; 3589 } 3590 3591 t_alg->registered = true; 3592 registered = true; 3593 } 3594 3595 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3596 struct caam_aead_alg *t_alg = driver_aeads + i; 3597 u32 c1_alg_sel = t_alg->caam.class1_alg_type & 3598 OP_ALG_ALGSEL_MASK; 3599 u32 c2_alg_sel = t_alg->caam.class2_alg_type & 3600 OP_ALG_ALGSEL_MASK; 3601 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; 3602 3603 /* Skip DES algorithms if not supported by device */ 3604 if (!des_inst && 3605 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) || 3606 (c1_alg_sel == OP_ALG_ALGSEL_DES))) 3607 continue; 3608 3609 /* Skip AES algorithms if not supported by device */ 3610 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES)) 3611 continue; 3612 3613 /* Skip CHACHA20 algorithms if not supported by device */ 3614 if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst) 3615 continue; 3616 3617 /* Skip POLY1305 algorithms if not supported by device */ 3618 if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst) 3619 continue; 3620 3621 /* Skip GCM algorithms if not supported by device */ 3622 if (c1_alg_sel == OP_ALG_ALGSEL_AES && 3623 alg_aai == OP_ALG_AAI_GCM && !gcm_support) 3624 continue; 3625 3626 /* 3627 * Skip algorithms requiring message digests 3628 * if MD or MD size is not supported by device. 3629 */ 3630 if (is_mdha(c2_alg_sel) && 3631 (!md_inst || t_alg->aead.maxauthsize > md_limit)) 3632 continue; 3633 3634 caam_aead_alg_init(t_alg); 3635 3636 err = crypto_register_aead(&t_alg->aead); 3637 if (err) { 3638 pr_warn("%s alg registration failed\n", 3639 t_alg->aead.base.cra_driver_name); 3640 continue; 3641 } 3642 3643 t_alg->registered = true; 3644 registered = true; 3645 } 3646 3647 if (registered) 3648 pr_info("caam algorithms registered in /proc/crypto\n"); 3649 3650 return err; 3651 } 3652