// SPDX-License-Identifier: GPL-2.0+
/*
 * caam - Freescale FSL CAAM support for crypto API
 *
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 * Copyright 2016-2019 NXP
 *
 * Based on talitos crypto API driver.
 *
 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
 *
 * ---------------                     ---------------
 * | JobDesc #1  |-------------------->|  ShareDesc  |
 * | *(packet 1) |                     |   (PDB)     |
 * ---------------      |------------->|  (hashKey)  |
 *       .              |              | (cipherKey) |
 *       .              |    |-------->| (operation) |
 * ---------------      |    |         ---------------
 * | JobDesc #2  |------|    |
 * | *(packet 2) |           |
 * ---------------           |
 *       .                   |
 *       .                   |
 * ---------------           |
 * | JobDesc #3  |------------
 * | *(packet 3) |
 * ---------------
 *
 * The SharedDesc never changes for a connection unless rekeyed, but
 * each packet will likely be in a different place. So all we need
 * to know to process the packet is where the input is, where the
 * output goes, and what context we want to process with. Context is
 * in the SharedDesc, packet references in the JobDesc.
 *
 * So, a job desc looks like:
 *
 * ---------------------
 * | Header            |
 * | ShareDesc Pointer |
 * | SEQ_OUT_PTR       |
 * | (output buffer)   |
 * | (output length)   |
 * | SEQ_IN_PTR        |
 * | (input buffer)    |
 * | (input length)    |
 * ---------------------
 */

#include "compat.h"

#include "regs.h"
#include "intern.h"
#include "desc_constr.h"
#include "jr.h"
#include "error.h"
#include "sg_sw_sec4.h"
#include "key_gen.h"
#include "caamalg_desc.h"

/*
 * crypto alg
 */
#define CAAM_CRA_PRIORITY		3000
/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
					 CTR_RFC3686_NONCE_SIZE + \
					 SHA512_DIGEST_SIZE * 2)

/*
 * Per-algorithm job-descriptor I/O overheads, in bytes, used below to
 * decide whether keys can be inlined into the shared descriptor while
 * still fitting in the 64-word descriptor buffer.
 */
#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 4)
#define AUTHENC_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + \
					 CAAM_CMD_SZ * 5)

#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)

/* Maximum shared-descriptor footprint (bytes / 32-bit command words) */
#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)

#ifdef DEBUG
/* for print_hex_dumps with line references */
#define debug(format, arg...) printk(format, arg)
#else
#define debug(format, arg...)
#endif

/* Static per-algorithm properties used when instantiating a template */
struct caam_alg_entry {
	int class1_alg_type;	/* OP_ALG_* type for the cipher (class 1) */
	int class2_alg_type;	/* OP_ALG_* type for the auth alg (class 2) */
	bool rfc3686;		/* CTR with RFC3686 nonce handling */
	bool geniv;		/* IV is generated by the accelerator */
	bool nodkp;		/* NOTE(review): presumably "no DKP" — confirm */
};

struct caam_aead_alg {
	struct aead_alg aead;
	struct caam_alg_entry caam;
	bool registered;
};

struct caam_skcipher_alg {
	struct skcipher_alg skcipher;
	struct caam_alg_entry caam;
	bool registered;
};

/*
 * per-session context
 */
struct caam_ctx {
	u32 sh_desc_enc[DESC_MAX_USED_LEN];	/* encrypt shared descriptor */
	u32 sh_desc_dec[DESC_MAX_USED_LEN];	/* decrypt shared descriptor */
	u8 key[CAAM_MAX_KEY_SIZE];		/* (split) key material */
	dma_addr_t sh_desc_enc_dma;
	dma_addr_t sh_desc_dec_dma;
	dma_addr_t key_dma;
	enum dma_data_direction dir;		/* direction for desc/key syncs */
	struct device *jrdev;			/* job ring device */
	struct alginfo adata;			/* auth (class 2) key/alg info */
	struct alginfo cdata;			/* cipher (class 1) key/alg info */
	unsigned int authsize;			/* ICV length in bytes */
};

/*
 * Build the encrypt/decrypt shared descriptors for the "null cipher"
 * (authentication-only) AEAD case and sync them to the device.
 * The split auth key is inlined when it fits in the 64-word buffer,
 * otherwise it is referenced via its DMA address.
 */
static int aead_null_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
			ctx->adata.keylen_pad;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
		ctx->adata.key_inline = true;
		ctx->adata.key_virt = ctx->key;
	} else {
		ctx->adata.key_inline = false;
		ctx->adata.key_dma = ctx->key_dma;
	}

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
				    ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/*
 * (Re)build the authenc-style AEAD shared descriptors (encrypt, decrypt
 * and - for geniv algorithms - givencrypt) and sync them to the device.
 * Keys are inlined or referenced by DMA address per desc_inline_query().
 * Returns 0 on success or -EINVAL if the descriptors cannot fit.
 */
static int aead_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	u32 ctx1_iv_off = 0;
	u32 *desc, *nonce = NULL;
	u32 inl_mask;
	unsigned int data_len[2];
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

	/* Nothing to build until setauthsize()/setkey() have run */
	if (!ctx->authsize)
		return 0;

	/* NULL encryption / decryption */
	if (!ctx->cdata.keylen)
		return aead_null_set_sh_desc(aead);

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* nonce is stored at the tail of the cipher key material */
		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
	}

	data_len[0] = ctx->adata.keylen_pad;
	data_len[1] = ctx->cdata.keylen;

	/* geniv algorithms use the givencrypt descriptor for encryption */
	if (alg->caam.geniv)
		goto skip_enc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_ENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	/* bit 0: inline auth key; bit 1: inline cipher key */
	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
			       false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_enc:
	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_DEC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, alg->caam.geniv, is_rfc3686,
			       nonce, ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	if (!alg->caam.geniv)
		goto skip_givenc;

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
			      ARRAY_SIZE(data_len)) < 0)
		return -EINVAL;

	if (inl_mask & 1)
		ctx->adata.key_virt = ctx->key;
	else
		ctx->adata.key_dma = ctx->key_dma;

	if (inl_mask & 2)
		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
	else
		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;

	ctx->adata.key_inline = !!(inl_mask & 1);
	ctx->cdata.key_inline = !!(inl_mask & 2);

	/* aead_givencrypt shared descriptor (stored in the enc slot) */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
				  ctx->authsize, is_rfc3686, nonce,
				  ctx1_iv_off, false, ctrlpriv->era);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

skip_givenc:
	return 0;
}

/*
 * Record the requested ICV size and rebuild the shared descriptors.
 * NOTE(review): the aead_set_sh_desc() return value (can be -EINVAL)
 * is discarded here — confirm this is intentional.
 */
static int aead_setauthsize(struct crypto_aead *authenc,
			    unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	aead_set_sh_desc(authenc);

	return 0;
}

/*
 * Build the GCM encrypt/decrypt shared descriptors and sync them to the
 * device; a no-op until both key and authsize are known.
 */
static int gcm_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * AES GCM encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_GCM_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/*
 * Record the requested ICV size and rebuild the GCM shared descriptors.
 * NOTE(review): gcm_set_sh_desc() return value is discarded — confirm.
 */
static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	gcm_set_sh_desc(authenc);

	return 0;
}

/*
 * Build the RFC4106 (GCM for IPsec ESP) encrypt/decrypt shared
 * descriptors and sync them to the device.
 */
static int rfc4106_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4106 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4106_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4106_set_sh_desc(authenc);

	return 0;
}

/*
 * Build the RFC4543 (GMAC) encrypt/decrypt shared descriptors and sync
 * them to the device.
 */
static int rfc4543_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;
	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
			ctx->cdata.keylen;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	/*
	 * RFC4543 encrypt shared descriptor
	 * Job Descriptor and Shared Descriptor
	 * must fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/*
	 * Job Descriptor and Shared Descriptors
	 * must all fit into the 64-word Descriptor h/w Buffer
	 */
	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
		ctx->cdata.key_inline = true;
		ctx->cdata.key_virt = ctx->key;
	} else {
		ctx->cdata.key_inline = false;
		ctx->cdata.key_dma = ctx->key_dma;
	}

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
				  false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

static int rfc4543_setauthsize(struct crypto_aead *authenc,
			       unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(authenc);

	ctx->authsize = authsize;
	rfc4543_set_sh_desc(authenc);

	return 0;
}

/*
 * Build the ChaCha20-Poly1305 encrypt/decrypt shared descriptors and
 * sync them to the device; a no-op until key and authsize are known.
 */
static int chachapoly_set_sh_desc(struct crypto_aead *aead)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc;

	if (!ctx->cdata.keylen || !ctx->authsize)
		return 0;

	desc = ctx->sh_desc_enc;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, true, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	desc = ctx->sh_desc_dec;
	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
			       ctx->authsize, false, false);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/* Poly1305 has a fixed 16-byte tag; reject anything else */
static int chachapoly_setauthsize(struct crypto_aead *aead,
				  unsigned int authsize)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);

	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;

	ctx->authsize = authsize;
	return chachapoly_set_sh_desc(aead);
}

/*
 * Set the ChaCha20 key. For the IPsec variant the trailing salt bytes
 * (CHACHAPOLY_IV_SIZE - ivsize) are not counted in cdata.keylen.
 */
static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;

	if (keylen != CHACHA_KEY_SIZE + saltlen) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx->cdata.key_virt = key;
	ctx->cdata.keylen = keylen - saltlen;

	return chachapoly_set_sh_desc(aead);
}

static int aead_setkey(struct
						crypto_aead *aead,
			const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
	struct crypto_authenc_keys keys;
	int ret = 0;

	/* key blob is an authenc-encoded {authkey, enckey} pair */
	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

#ifdef DEBUG
	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
	       keys.authkeylen);
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	/*
	 * If DKP is supported, use it in the shared descriptor to generate
	 * the split key.
	 */
	if (ctrlpriv->era >= 6) {
		ctx->adata.keylen = keys.authkeylen;
		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
						      OP_ALG_ALGSEL_MASK);

		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
			goto badkey;

		/* raw auth key first, cipher key appended after padding */
		memcpy(ctx->key, keys.authkey, keys.authkeylen);
		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
		       keys.enckeylen);
		dma_sync_single_for_device(jrdev, ctx->key_dma,
					   ctx->adata.keylen_pad +
					   keys.enckeylen, ctx->dir);
		goto skip_split_key;
	}

	/* pre-era-6: have the device derive the split key now */
	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
			    keys.enckeylen);
	if (ret) {
		goto badkey;
	}

	/* postpend encryption key to auth split key */
	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
				   keys.enckeylen, ctx->dir);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
		       ctx->adata.keylen_pad + keys.enckeylen, 1);
#endif

skip_split_key:
	ctx->cdata.keylen = keys.enckeylen;
	/* scrub key material from the stack before returning */
	memzero_explicit(&keys, sizeof(keys));
	return aead_set_sh_desc(aead);
badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

/*
 * authenc setkey wrapper for 3DES ciphers: validates the 3DES key
 * (length and weak-key check) before delegating to aead_setkey().
 */
static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_authenc_keys keys;
	u32 flags;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto badkey;

	err = -EINVAL;
	if (keys.enckeylen != DES3_EDE_KEY_SIZE)
		goto badkey;

	flags = crypto_aead_get_flags(aead);
	err = __des3_verify_key(&flags, keys.enckey);
	if (unlikely(err)) {
		crypto_aead_set_flags(aead, flags);
		goto out;
	}

	err = aead_setkey(aead, key, keylen);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;

badkey:
	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
	goto out;
}

/* Copy the raw GCM key into the context and rebuild descriptors */
static int gcm_setkey(struct crypto_aead *aead,
		      const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);
	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
	ctx->cdata.keylen = keylen;

	return gcm_set_sh_desc(aead);
}

/* RFC4106 key = AES key || 4-byte salt; needs at least the salt */
static int rfc4106_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4106_set_sh_desc(aead);
}

/* RFC4543 key layout is identical to RFC4106: AES key || 4-byte salt */
static int rfc4543_setkey(struct crypto_aead *aead,
			  const u8 *key, unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;

	if (keylen < 4)
		return -EINVAL;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif

	memcpy(ctx->key, key, keylen);

	/*
	 * The last four bytes of the key material are used as the salt value
	 * in the nonce. Update the AES key length.
	 */
	ctx->cdata.keylen = keylen - 4;
	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
				   ctx->dir);
	return rfc4543_set_sh_desc(aead);
}

/*
 * Generic skcipher setkey: records the (inlined) key and rebuilds the
 * encrypt/decrypt shared descriptors. Handles the AES-CTR and RFC3686
 * CONTEXT1 IV offsets.
 */
static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			   unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct caam_skcipher_alg *alg =
		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
			     skcipher);
	struct device *jrdev = ctx->jrdev;
	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc;
	u32 ctx1_iv_off = 0;
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
#endif
	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ctx1_iv_off = 16;

	/*
	 * RFC3686 specific:
	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 *	| *key = {KEY, NONCE}
	 */
	if (is_rfc3686) {
		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
		/* nonce trails the key; exclude it from the cipher keylen */
		keylen -= CTR_RFC3686_NONCE_SIZE;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
				   ctx1_iv_off);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/*
 * DES/3DES skcipher setkey: runs the software key checks (3DES parity/
 * equal-subkey check, DES weak-key check) before the generic setkey.
 */
static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
			       const u8 *key, unsigned int keylen)
{
	u32 tmp[DES3_EDE_EXPKEY_WORDS];
	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);

	if (keylen == DES3_EDE_KEY_SIZE &&
	    __des3_ede_setkey(tmp, &tfm->crt_flags, key, DES3_EDE_KEY_SIZE)) {
		return -EINVAL;
	}

	/* reject weak DES keys when the caller forbids them */
	if (!des_ekey(tmp, key) && (crypto_skcipher_get_flags(skcipher) &
	    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
		crypto_skcipher_set_flags(skcipher,
					  CRYPTO_TFM_RES_WEAK_KEY);
		return -EINVAL;
	}

	return skcipher_setkey(skcipher, key, keylen);
}

/*
 * XTS setkey: XTS uses two AES keys, so only 2*128-bit or 2*256-bit
 * key lengths are accepted here.
 */
static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
			       unsigned int keylen)
{
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;

	if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
		crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		dev_err(jrdev, "key size mismatch\n");
		return -EINVAL;
	}

	ctx->cdata.keylen = keylen;
	ctx->cdata.key_virt = key;
	ctx->cdata.key_inline = true;

	/* xts_skcipher_encrypt shared descriptor */
	desc = ctx->sh_desc_enc;
	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
				   desc_bytes(desc), ctx->dir);

	/* xts_skcipher_decrypt shared descriptor */
	desc = ctx->sh_desc_dec;
	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
				   desc_bytes(desc), ctx->dir);

	return 0;
}

/*
 * aead_edesc - s/w-extended aead descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 */
struct aead_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	u32 hw_desc[];
};

/*
 * skcipher_edesc - s/w-extended skcipher descriptor
 * @src_nents: number of segments in input s/w scatterlist
 * @dst_nents: number of segments in output s/w scatterlist
 * @mapped_src_nents: number of segments in input h/w link table
 * @mapped_dst_nents: number of segments in output h/w link table
 * @iv_dma: dma address of iv for checking continuity and link table
 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 * @sec4_sg_dma: bus physical mapped address of h/w link table
 * @sec4_sg: pointer to h/w link table
 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 *	     and IV
 */
struct skcipher_edesc {
	int src_nents;
	int dst_nents;
	int mapped_src_nents;
	int mapped_dst_nents;
	dma_addr_t iv_dma;
	int sec4_sg_bytes;
	dma_addr_t sec4_sg_dma;
	struct sec4_sg_entry *sec4_sg;
	/* NOTE(review): aead_edesc uses the C99 flexible array `[]`;
	 * `[0]` here is the legacy GNU spelling of the same idiom.
	 */
	u32 hw_desc[0];
};

/*
 * Undo all DMA mappings made for one request: src/dst scatterlists
 * (bidirectional when in-place), the IV buffer, and the sec4 link table.
 */
static void caam_unmap(struct device *dev, struct scatterlist *src,
		       struct scatterlist *dst, int src_nents,
		       int dst_nents,
		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
		       int sec4_sg_bytes)
{
	if (dst != src) {
		if (src_nents)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		if (dst_nents)
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}

	if (iv_dma)
		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
	if (sec4_sg_bytes)
		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
				 DMA_TO_DEVICE);
}

/* AEAD requests carry no separately-mapped IV (iv_dma/ivsize = 0) */
static void aead_unmap(struct device *dev,
		       struct aead_edesc *edesc,
		       struct aead_request *req)
{
	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents, 0, 0,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
			   struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	caam_unmap(dev, req->src, req->dst,
		   edesc->src_nents, edesc->dst_nents,
		   edesc->iv_dma, ivsize,
		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
}

/* Job-ring completion callback for AEAD encryption */
static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* hw_desc is embedded in the edesc, recover the container */
	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	kfree(edesc);

	aead_request_complete(req, err);
}

/* Job-ring completion callback for AEAD decryption */
static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
			      void *context)
{
	struct aead_request *req = context;
	struct aead_edesc *edesc;

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	aead_unmap(jrdev, edesc, req);

	/*
	 * verify hw auth check passed else return -EBADMSG
	 */
	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

/* Job-ring completion callback for skcipher encryption */
static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

#ifdef DEBUG
	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);

	if (err)
		caam_jr_strstatus(jrdev, err);

	skcipher_unmap(jrdev, edesc, req);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block. This is used e.g. by the CTS mode.
	 */
	if (ivsize)
		scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
					 ivsize, ivsize, 0);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
		       edesc->src_nents > 1 ? 100 : ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ?
100 : req->cryptlen, 1); 1031 1032 kfree(edesc); 1033 1034 skcipher_request_complete(req, err); 1035 } 1036 1037 static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 1038 void *context) 1039 { 1040 struct skcipher_request *req = context; 1041 struct skcipher_edesc *edesc; 1042 #ifdef DEBUG 1043 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1044 int ivsize = crypto_skcipher_ivsize(skcipher); 1045 1046 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 1047 #endif 1048 1049 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]); 1050 if (err) 1051 caam_jr_strstatus(jrdev, err); 1052 1053 skcipher_unmap(jrdev, edesc, req); 1054 1055 #ifdef DEBUG 1056 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ", 1057 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1); 1058 #endif 1059 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ", 1060 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 1061 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); 1062 1063 kfree(edesc); 1064 1065 skcipher_request_complete(req, err); 1066 } 1067 1068 /* 1069 * Fill in aead job descriptor 1070 */ 1071 static void init_aead_job(struct aead_request *req, 1072 struct aead_edesc *edesc, 1073 bool all_contig, bool encrypt) 1074 { 1075 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1076 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1077 int authsize = ctx->authsize; 1078 u32 *desc = edesc->hw_desc; 1079 u32 out_options, in_options; 1080 dma_addr_t dst_dma, src_dma; 1081 int len, sec4_sg_index = 0; 1082 dma_addr_t ptr; 1083 u32 *sh_desc; 1084 1085 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 1086 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 1087 1088 len = desc_len(sh_desc); 1089 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1090 1091 if (all_contig) { 1092 src_dma = edesc->mapped_src_nents ? 
sg_dma_address(req->src) : 1093 0; 1094 in_options = 0; 1095 } else { 1096 src_dma = edesc->sec4_sg_dma; 1097 sec4_sg_index += edesc->mapped_src_nents; 1098 in_options = LDST_SGF; 1099 } 1100 1101 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, 1102 in_options); 1103 1104 dst_dma = src_dma; 1105 out_options = in_options; 1106 1107 if (unlikely(req->src != req->dst)) { 1108 if (!edesc->mapped_dst_nents) { 1109 dst_dma = 0; 1110 out_options = 0; 1111 } else if (edesc->mapped_dst_nents == 1) { 1112 dst_dma = sg_dma_address(req->dst); 1113 out_options = 0; 1114 } else { 1115 dst_dma = edesc->sec4_sg_dma + 1116 sec4_sg_index * 1117 sizeof(struct sec4_sg_entry); 1118 out_options = LDST_SGF; 1119 } 1120 } 1121 1122 if (encrypt) 1123 append_seq_out_ptr(desc, dst_dma, 1124 req->assoclen + req->cryptlen + authsize, 1125 out_options); 1126 else 1127 append_seq_out_ptr(desc, dst_dma, 1128 req->assoclen + req->cryptlen - authsize, 1129 out_options); 1130 } 1131 1132 static void init_gcm_job(struct aead_request *req, 1133 struct aead_edesc *edesc, 1134 bool all_contig, bool encrypt) 1135 { 1136 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1137 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1138 unsigned int ivsize = crypto_aead_ivsize(aead); 1139 u32 *desc = edesc->hw_desc; 1140 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE); 1141 unsigned int last; 1142 1143 init_aead_job(req, edesc, all_contig, encrypt); 1144 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1145 1146 /* BUG This should not be specific to generic GCM. 
 */
	/* Mark the IV FIFO load as LAST1 when there is no other input data */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt (stored after the key for non-generic, i.e. RFC GCM) */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

/*
 * Fill in ChaCha20-Poly1305 job descriptor: common AEAD part plus
 * assoclen math and loading the nonce/IV into CONTEXT1.
 */
static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12 bytes nonce in a single operation
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}

/*
 * Fill in authenc (e.g. hmac+cbc) job descriptor: common AEAD part plus
 * assoclen bookkeeping and, when needed, an immediate IV load.
 */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
1232 */ 1233 if (ctrlpriv->era < 3) 1234 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1235 else 1236 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen); 1237 1238 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv)) 1239 append_load_as_imm(desc, req->iv, ivsize, 1240 LDST_CLASS_1_CCB | 1241 LDST_SRCDST_BYTE_CONTEXT | 1242 (ivoffset << LDST_OFFSET_SHIFT)); 1243 } 1244 1245 /* 1246 * Fill in skcipher job descriptor 1247 */ 1248 static void init_skcipher_job(struct skcipher_request *req, 1249 struct skcipher_edesc *edesc, 1250 const bool encrypt) 1251 { 1252 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1253 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1254 int ivsize = crypto_skcipher_ivsize(skcipher); 1255 u32 *desc = edesc->hw_desc; 1256 u32 *sh_desc; 1257 u32 in_options = 0, out_options = 0; 1258 dma_addr_t src_dma, dst_dma, ptr; 1259 int len, sec4_sg_index = 0; 1260 1261 #ifdef DEBUG 1262 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ", 1263 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1); 1264 pr_err("asked=%d, cryptlen%d\n", 1265 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); 1266 #endif 1267 caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ", 1268 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1269 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); 1270 1271 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 1272 ptr = encrypt ? 
ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 1273 1274 len = desc_len(sh_desc); 1275 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1276 1277 if (ivsize || edesc->mapped_src_nents > 1) { 1278 src_dma = edesc->sec4_sg_dma; 1279 sec4_sg_index = edesc->mapped_src_nents + !!ivsize; 1280 in_options = LDST_SGF; 1281 } else { 1282 src_dma = sg_dma_address(req->src); 1283 } 1284 1285 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options); 1286 1287 if (likely(req->src == req->dst)) { 1288 dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry); 1289 out_options = in_options; 1290 } else if (edesc->mapped_dst_nents == 1) { 1291 dst_dma = sg_dma_address(req->dst); 1292 } else { 1293 dst_dma = edesc->sec4_sg_dma + sec4_sg_index * 1294 sizeof(struct sec4_sg_entry); 1295 out_options = LDST_SGF; 1296 } 1297 1298 append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options); 1299 } 1300 1301 /* 1302 * allocate and map the aead extended descriptor 1303 */ 1304 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req, 1305 int desc_bytes, bool *all_contig_ptr, 1306 bool encrypt) 1307 { 1308 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1309 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1310 struct device *jrdev = ctx->jrdev; 1311 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1312 GFP_KERNEL : GFP_ATOMIC; 1313 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1314 struct aead_edesc *edesc; 1315 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes; 1316 unsigned int authsize = ctx->authsize; 1317 1318 if (unlikely(req->dst != req->src)) { 1319 src_nents = sg_nents_for_len(req->src, req->assoclen + 1320 req->cryptlen); 1321 if (unlikely(src_nents < 0)) { 1322 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1323 req->assoclen + req->cryptlen); 1324 return ERR_PTR(src_nents); 1325 } 1326 1327 dst_nents = sg_nents_for_len(req->dst, req->assoclen + 1328 req->cryptlen + 1329 (encrypt ? 
authsize :
					      (-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		/* In-place: src must also hold the ICV appended on encrypt */
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover also the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	/*
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries.
	 */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	if (mapped_dst_nents > 1)
		sec4_sg_len += pad_sg_nents(mapped_dst_nents);
	else
		sec4_sg_len = pad_sg_nents(sec4_sg_len);

	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	/* The S/G table lives right after the hw job descriptor */
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	/* All-contiguous case: no table to map */
	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}

/* Submit a GCM encryption job; returns -EINPROGRESS on successful enqueue */
static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req,
GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: clean up synchronously */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* Submit a ChaCha20-Poly1305 encryption job */
static int chachapoly_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, true);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* Submit a ChaCha20-Poly1305 decryption job */
static int chachapoly_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, false);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* RFC4106/4543: assoclen must carry at least SPI + sequence number (8 bytes) */
static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}

/* Submit an authenc (hash+cipher) encryption job */
static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* Submit a GCM decryption job */
static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req,
GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: clean up synchronously */
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* RFC4106/4543: assoclen must carry at least SPI + sequence number (8 bytes) */
static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}

/* Submit an authenc (hash+cipher) decryption job */
static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     req->assoclen + req->cryptlen, 1);

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map
 * the skcipher extended descriptor for skcipher
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	/* May only sleep if the caller allows it */
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	if (!ivsize && mapped_src_nents == 1)
		sec4_sg_ents = 0; // no need for an input hw s/g table
	else
		sec4_sg_ents = mapped_src_nents + !!ivsize;
	dst_sg_idx = sec4_sg_ents;

	/*
	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
	 * the end of the table by allocating more S/G entries. Logic:
	 * if (src != dst && output S/G)
	 *	pad output S/G, if needed
	 * else if (src == dst && S/G)
	 *	overlapping S/Gs; pad one of them
	 * else if (input S/G) ...
	 *	pad input S/G, if needed
	 */
	if (mapped_dst_nents > 1)
		sec4_sg_ents += pad_sg_nents(mapped_dst_nents);
	else if ((req->src == req->dst) && (mapped_src_nents > 1))
		sec4_sg_ents = max(pad_sg_nents(sec4_sg_ents),
				   !!ivsize + pad_sg_nents(mapped_src_nents));
	else
		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);

	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);

	/* Make sure IV is located in a DMAable area */
	if (ivsize) {
		/* IV is copied into the tail of the kzalloc'ed area */
		iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, iv_dma)) {
			dev_err(jrdev, "unable to map IV\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		/* IV occupies the first S/G table entry */
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	}
	if (dst_sg_idx)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg +
				   !!ivsize, 0);

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	if (sec4_sg_bytes) {
		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
						    sec4_sg_bytes,
						    DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
			dev_err(jrdev, "unable to map S/G table\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, iv_dma, ivsize, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	return edesc;
}

/* Submit an skcipher encryption job; returns -EINPROGRESS on enqueue */
static int skcipher_encrypt(struct skcipher_request *req)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_skcipher_job(req, edesc, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: clean up synchronously */
		skcipher_unmap(jrdev, edesc, req);
kfree(edesc);
	}

	return ret;
}

/* Submit an skcipher decryption job; returns -EINPROGRESS on enqueue */
static int skcipher_decrypt(struct skcipher_request *req)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block.
	 */
	if (ivsize)
		scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
					 ivsize, ivsize, 0);

	/* Create and submit job descriptor*/
	init_skcipher_job(req, edesc, false);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		/* Enqueue failed: clean up synchronously */
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* skcipher algorithm templates registered with the crypto API */
static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			/* Key material carries an extra per-tfm nonce */
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			/* XTS uses two keys, hence double-size key material */
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des)",
				.cra_driver_name = "ecb-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(aes)",
				.cra_driver_name = "ecb-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des3_ede)",
				.cra_driver_name = "ecb-des3-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(arc4)",
				.cra_driver_name = "ecb-arc4-caam",
				.cra_blocksize = ARC4_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = ARC4_MIN_KEY_SIZE,
			.max_keysize = ARC4_MAX_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB,
	},
};

static
/*
 * Table of AEAD algorithms this driver registers with the crypto API.
 * Each entry pairs the generic aead_alg template (names, block/IV/auth
 * sizes, setkey/encrypt/decrypt entry points) with CAAM-specific
 * descriptor parameters (.caam): class 1 (cipher) and class 2 (auth)
 * OP_ALG_* algorithm selectors plus flags:
 *   .rfc3686 - CTR mode with RFC3686 nonce handling
 *   .geniv   - IV is generated by the driver (echainiv/seqiv wrappers)
 *   .nodkp   - no split-key (DKP) processing needed for this algorithm
 */
struct caam_aead_alg driver_aeads[] = {
	{
		.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4106_setkey,
			.setauthsize = rfc4106_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = GCM_RFC4106_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc4543(gcm(aes))",
				.cra_driver_name = "rfc4543-gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = rfc4543_setkey,
			.setauthsize = rfc4543_setauthsize,
			.encrypt = ipsec_gcm_encrypt,
			.decrypt = ipsec_gcm_decrypt,
			.ivsize = GCM_RFC4543_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	/* Galois Counter Mode */
	{
		.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = gcm_setkey,
			.setauthsize = gcm_setauthsize,
			.encrypt = gcm_encrypt,
			.decrypt = gcm_decrypt,
			.ivsize = GCM_AES_IV_SIZE,
			.maxauthsize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
			.nodkp = true,
		},
	},
	/* single-pass ipsec_esp descriptor */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "ecb(cipher_null))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "ecb-cipher_null-caam",
				.cra_blocksize = NULL_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = NULL_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	/* HMAC + cbc(aes), plain and echainiv (driver-generated IV) */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(aes)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	/* HMAC + cbc(des3_ede); note the 3DES-specific setkey handler */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		}
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des3_ede)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-"
						   "cbc-des3_ede-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des3_aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	/* HMAC + cbc(des) */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(md5),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-hmac-md5-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha1),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha1-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha224),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha224-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha256),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha256-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha384),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha384-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(des))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "echainiv(authenc(hmac(sha512),"
					    "cbc(des)))",
				.cra_driver_name = "echainiv-authenc-"
						   "hmac-sha512-cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = DES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.geniv = true,
		},
	},
	/* HMAC + rfc3686(ctr(aes)), plain and seqiv (driver-generated IV) */
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(md5),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-md5-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha1),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc("
					    "hmac(sha224),rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha256),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha384),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "rfc3686(ctr(aes)))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "seqiv(authenc(hmac(sha512),"
					    "rfc3686(ctr(aes))))",
				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
						   "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = aead_setkey,
			.setauthsize = aead_setauthsize,
			.encrypt = aead_encrypt,
			.decrypt = aead_decrypt,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
					   OP_ALG_AAI_HMAC_PRECOMP,
			.rfc3686 = true,
			.geniv = true,
		},
	},
	/* ChaCha20 + Poly1305 AEAD (RFC 7539) */
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539(chacha20,poly1305)",
				.cra_driver_name = "rfc7539-chacha20-poly1305-"
						   "caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = CHACHAPOLY_IV_SIZE,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539esp(chacha20,poly1305)",
				.cra_driver_name = "rfc7539esp-chacha20-"
						   "poly1305-caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			.ivsize = 8,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
};

static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	if (priv->era
>= 6 && uses_dkp) 3411 ctx->dir = DMA_BIDIRECTIONAL; 3412 else 3413 ctx->dir = DMA_TO_DEVICE; 3414 3415 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, 3416 offsetof(struct caam_ctx, 3417 sh_desc_enc_dma), 3418 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3419 if (dma_mapping_error(ctx->jrdev, dma_addr)) { 3420 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); 3421 caam_jr_free(ctx->jrdev); 3422 return -ENOMEM; 3423 } 3424 3425 ctx->sh_desc_enc_dma = dma_addr; 3426 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx, 3427 sh_desc_dec); 3428 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key); 3429 3430 /* copy descriptor header template value */ 3431 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type; 3432 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type; 3433 3434 return 0; 3435 } 3436 3437 static int caam_cra_init(struct crypto_skcipher *tfm) 3438 { 3439 struct skcipher_alg *alg = crypto_skcipher_alg(tfm); 3440 struct caam_skcipher_alg *caam_alg = 3441 container_of(alg, typeof(*caam_alg), skcipher); 3442 3443 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam, 3444 false); 3445 } 3446 3447 static int caam_aead_init(struct crypto_aead *tfm) 3448 { 3449 struct aead_alg *alg = crypto_aead_alg(tfm); 3450 struct caam_aead_alg *caam_alg = 3451 container_of(alg, struct caam_aead_alg, aead); 3452 struct caam_ctx *ctx = crypto_aead_ctx(tfm); 3453 3454 return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp); 3455 } 3456 3457 static void caam_exit_common(struct caam_ctx *ctx) 3458 { 3459 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, 3460 offsetof(struct caam_ctx, sh_desc_enc_dma), 3461 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3462 caam_jr_free(ctx->jrdev); 3463 } 3464 3465 static void caam_cra_exit(struct crypto_skcipher *tfm) 3466 { 3467 caam_exit_common(crypto_skcipher_ctx(tfm)); 3468 } 3469 3470 static void caam_aead_exit(struct crypto_aead *tfm) 3471 { 3472 
caam_exit_common(crypto_aead_ctx(tfm)); 3473 } 3474 3475 void caam_algapi_exit(void) 3476 { 3477 int i; 3478 3479 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3480 struct caam_aead_alg *t_alg = driver_aeads + i; 3481 3482 if (t_alg->registered) 3483 crypto_unregister_aead(&t_alg->aead); 3484 } 3485 3486 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3487 struct caam_skcipher_alg *t_alg = driver_algs + i; 3488 3489 if (t_alg->registered) 3490 crypto_unregister_skcipher(&t_alg->skcipher); 3491 } 3492 } 3493 3494 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg) 3495 { 3496 struct skcipher_alg *alg = &t_alg->skcipher; 3497 3498 alg->base.cra_module = THIS_MODULE; 3499 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3500 alg->base.cra_ctxsize = sizeof(struct caam_ctx); 3501 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 3502 3503 alg->init = caam_cra_init; 3504 alg->exit = caam_cra_exit; 3505 } 3506 3507 static void caam_aead_alg_init(struct caam_aead_alg *t_alg) 3508 { 3509 struct aead_alg *alg = &t_alg->aead; 3510 3511 alg->base.cra_module = THIS_MODULE; 3512 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3513 alg->base.cra_ctxsize = sizeof(struct caam_ctx); 3514 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 3515 3516 alg->init = caam_aead_init; 3517 alg->exit = caam_aead_exit; 3518 } 3519 3520 int caam_algapi_init(struct device *ctrldev) 3521 { 3522 struct caam_drv_private *priv = dev_get_drvdata(ctrldev); 3523 int i = 0, err = 0; 3524 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst; 3525 u32 arc4_inst; 3526 unsigned int md_limit = SHA512_DIGEST_SIZE; 3527 bool registered = false, gcm_support; 3528 3529 /* 3530 * Register crypto algorithms the device supports. 3531 * First, detect presence and attributes of DES, AES, and MD blocks. 
3532 */ 3533 if (priv->era < 10) { 3534 u32 cha_vid, cha_inst, aes_rn; 3535 3536 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls); 3537 aes_vid = cha_vid & CHA_ID_LS_AES_MASK; 3538 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3539 3540 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls); 3541 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> 3542 CHA_ID_LS_DES_SHIFT; 3543 aes_inst = cha_inst & CHA_ID_LS_AES_MASK; 3544 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3545 arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >> 3546 CHA_ID_LS_ARC4_SHIFT; 3547 ccha_inst = 0; 3548 ptha_inst = 0; 3549 3550 aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) & 3551 CHA_ID_LS_AES_MASK; 3552 gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8); 3553 } else { 3554 u32 aesa, mdha; 3555 3556 aesa = rd_reg32(&priv->ctrl->vreg.aesa); 3557 mdha = rd_reg32(&priv->ctrl->vreg.mdha); 3558 3559 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT; 3560 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT; 3561 3562 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK; 3563 aes_inst = aesa & CHA_VER_NUM_MASK; 3564 md_inst = mdha & CHA_VER_NUM_MASK; 3565 ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK; 3566 ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK; 3567 arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK; 3568 3569 gcm_support = aesa & CHA_VER_MISC_AES_GCM; 3570 } 3571 3572 /* If MD is present, limit digest size based on LP256 */ 3573 if (md_inst && md_vid == CHA_VER_VID_MD_LP256) 3574 md_limit = SHA256_DIGEST_SIZE; 3575 3576 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3577 struct caam_skcipher_alg *t_alg = driver_algs + i; 3578 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK; 3579 3580 /* Skip DES algorithms if not supported by device */ 3581 if (!des_inst && 3582 ((alg_sel == OP_ALG_ALGSEL_3DES) || 3583 (alg_sel == OP_ALG_ALGSEL_DES))) 3584 continue; 3585 3586 /* Skip 
AES algorithms if not supported by device */ 3587 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES)) 3588 continue; 3589 3590 /* Skip ARC4 algorithms if not supported by device */ 3591 if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4) 3592 continue; 3593 3594 /* 3595 * Check support for AES modes not available 3596 * on LP devices. 3597 */ 3598 if (aes_vid == CHA_VER_VID_AES_LP && 3599 (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) == 3600 OP_ALG_AAI_XTS) 3601 continue; 3602 3603 caam_skcipher_alg_init(t_alg); 3604 3605 err = crypto_register_skcipher(&t_alg->skcipher); 3606 if (err) { 3607 pr_warn("%s alg registration failed\n", 3608 t_alg->skcipher.base.cra_driver_name); 3609 continue; 3610 } 3611 3612 t_alg->registered = true; 3613 registered = true; 3614 } 3615 3616 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3617 struct caam_aead_alg *t_alg = driver_aeads + i; 3618 u32 c1_alg_sel = t_alg->caam.class1_alg_type & 3619 OP_ALG_ALGSEL_MASK; 3620 u32 c2_alg_sel = t_alg->caam.class2_alg_type & 3621 OP_ALG_ALGSEL_MASK; 3622 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; 3623 3624 /* Skip DES algorithms if not supported by device */ 3625 if (!des_inst && 3626 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) || 3627 (c1_alg_sel == OP_ALG_ALGSEL_DES))) 3628 continue; 3629 3630 /* Skip AES algorithms if not supported by device */ 3631 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES)) 3632 continue; 3633 3634 /* Skip CHACHA20 algorithms if not supported by device */ 3635 if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst) 3636 continue; 3637 3638 /* Skip POLY1305 algorithms if not supported by device */ 3639 if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst) 3640 continue; 3641 3642 /* Skip GCM algorithms if not supported by device */ 3643 if (c1_alg_sel == OP_ALG_ALGSEL_AES && 3644 alg_aai == OP_ALG_AAI_GCM && !gcm_support) 3645 continue; 3646 3647 /* 3648 * Skip algorithms requiring message digests 3649 * if MD or MD size is not supported by device. 
3650 */ 3651 if (is_mdha(c2_alg_sel) && 3652 (!md_inst || t_alg->aead.maxauthsize > md_limit)) 3653 continue; 3654 3655 caam_aead_alg_init(t_alg); 3656 3657 err = crypto_register_aead(&t_alg->aead); 3658 if (err) { 3659 pr_warn("%s alg registration failed\n", 3660 t_alg->aead.base.cra_driver_name); 3661 continue; 3662 } 3663 3664 t_alg->registered = true; 3665 registered = true; 3666 } 3667 3668 if (registered) 3669 pr_info("caam algorithms registered in /proc/crypto\n"); 3670 3671 return err; 3672 } 3673