1 // SPDX-License-Identifier: GPL-2.0+ 2 /* 3 * caam - Freescale FSL CAAM support for crypto API 4 * 5 * Copyright 2008-2011 Freescale Semiconductor, Inc. 6 * Copyright 2016-2019 NXP 7 * 8 * Based on talitos crypto API driver. 9 * 10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008): 11 * 12 * --------------- --------------- 13 * | JobDesc #1 |-------------------->| ShareDesc | 14 * | *(packet 1) | | (PDB) | 15 * --------------- |------------->| (hashKey) | 16 * . | | (cipherKey) | 17 * . | |-------->| (operation) | 18 * --------------- | | --------------- 19 * | JobDesc #2 |------| | 20 * | *(packet 2) | | 21 * --------------- | 22 * . | 23 * . | 24 * --------------- | 25 * | JobDesc #3 |------------ 26 * | *(packet 3) | 27 * --------------- 28 * 29 * The SharedDesc never changes for a connection unless rekeyed, but 30 * each packet will likely be in a different place. So all we need 31 * to know to process the packet is where the input is, where the 32 * output goes, and what context we want to process with. Context is 33 * in the SharedDesc, packet references in the JobDesc. 
34 * 35 * So, a job desc looks like: 36 * 37 * --------------------- 38 * | Header | 39 * | ShareDesc Pointer | 40 * | SEQ_OUT_PTR | 41 * | (output buffer) | 42 * | (output length) | 43 * | SEQ_IN_PTR | 44 * | (input buffer) | 45 * | (input length) | 46 * --------------------- 47 */ 48 49 #include "compat.h" 50 51 #include "regs.h" 52 #include "intern.h" 53 #include "desc_constr.h" 54 #include "jr.h" 55 #include "error.h" 56 #include "sg_sw_sec4.h" 57 #include "key_gen.h" 58 #include "caamalg_desc.h" 59 60 /* 61 * crypto alg 62 */ 63 #define CAAM_CRA_PRIORITY 3000 64 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */ 65 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \ 66 CTR_RFC3686_NONCE_SIZE + \ 67 SHA512_DIGEST_SIZE * 2) 68 69 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2) 70 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 71 CAAM_CMD_SZ * 4) 72 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 73 CAAM_CMD_SZ * 5) 74 75 #define CHACHAPOLY_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6) 76 77 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN) 78 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ) 79 80 #ifdef DEBUG 81 /* for print_hex_dumps with line references */ 82 #define debug(format, arg...) printk(format, arg) 83 #else 84 #define debug(format, arg...) 
85 #endif 86 87 struct caam_alg_entry { 88 int class1_alg_type; 89 int class2_alg_type; 90 bool rfc3686; 91 bool geniv; 92 bool nodkp; 93 }; 94 95 struct caam_aead_alg { 96 struct aead_alg aead; 97 struct caam_alg_entry caam; 98 bool registered; 99 }; 100 101 struct caam_skcipher_alg { 102 struct skcipher_alg skcipher; 103 struct caam_alg_entry caam; 104 bool registered; 105 }; 106 107 /* 108 * per-session context 109 */ 110 struct caam_ctx { 111 u32 sh_desc_enc[DESC_MAX_USED_LEN]; 112 u32 sh_desc_dec[DESC_MAX_USED_LEN]; 113 u8 key[CAAM_MAX_KEY_SIZE]; 114 dma_addr_t sh_desc_enc_dma; 115 dma_addr_t sh_desc_dec_dma; 116 dma_addr_t key_dma; 117 enum dma_data_direction dir; 118 struct device *jrdev; 119 struct alginfo adata; 120 struct alginfo cdata; 121 unsigned int authsize; 122 }; 123 124 static int aead_null_set_sh_desc(struct crypto_aead *aead) 125 { 126 struct caam_ctx *ctx = crypto_aead_ctx(aead); 127 struct device *jrdev = ctx->jrdev; 128 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 129 u32 *desc; 130 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN - 131 ctx->adata.keylen_pad; 132 133 /* 134 * Job Descriptor and Shared Descriptors 135 * must all fit into the 64-word Descriptor h/w Buffer 136 */ 137 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) { 138 ctx->adata.key_inline = true; 139 ctx->adata.key_virt = ctx->key; 140 } else { 141 ctx->adata.key_inline = false; 142 ctx->adata.key_dma = ctx->key_dma; 143 } 144 145 /* aead_encrypt shared descriptor */ 146 desc = ctx->sh_desc_enc; 147 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize, 148 ctrlpriv->era); 149 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 150 desc_bytes(desc), ctx->dir); 151 152 /* 153 * Job Descriptor and Shared Descriptors 154 * must all fit into the 64-word Descriptor h/w Buffer 155 */ 156 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) { 157 ctx->adata.key_inline = true; 158 ctx->adata.key_virt = ctx->key; 159 } else { 160 ctx->adata.key_inline = false; 
161 ctx->adata.key_dma = ctx->key_dma; 162 } 163 164 /* aead_decrypt shared descriptor */ 165 desc = ctx->sh_desc_dec; 166 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize, 167 ctrlpriv->era); 168 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 169 desc_bytes(desc), ctx->dir); 170 171 return 0; 172 } 173 174 static int aead_set_sh_desc(struct crypto_aead *aead) 175 { 176 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 177 struct caam_aead_alg, aead); 178 unsigned int ivsize = crypto_aead_ivsize(aead); 179 struct caam_ctx *ctx = crypto_aead_ctx(aead); 180 struct device *jrdev = ctx->jrdev; 181 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 182 u32 ctx1_iv_off = 0; 183 u32 *desc, *nonce = NULL; 184 u32 inl_mask; 185 unsigned int data_len[2]; 186 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 187 OP_ALG_AAI_CTR_MOD128); 188 const bool is_rfc3686 = alg->caam.rfc3686; 189 190 if (!ctx->authsize) 191 return 0; 192 193 /* NULL encryption / decryption */ 194 if (!ctx->cdata.keylen) 195 return aead_null_set_sh_desc(aead); 196 197 /* 198 * AES-CTR needs to load IV in CONTEXT1 reg 199 * at an offset of 128bits (16bytes) 200 * CONTEXT1[255:128] = IV 201 */ 202 if (ctr_mode) 203 ctx1_iv_off = 16; 204 205 /* 206 * RFC3686 specific: 207 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 208 */ 209 if (is_rfc3686) { 210 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 211 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad + 212 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE); 213 } 214 215 data_len[0] = ctx->adata.keylen_pad; 216 data_len[1] = ctx->cdata.keylen; 217 218 if (alg->caam.geniv) 219 goto skip_enc; 220 221 /* 222 * Job Descriptor and Shared Descriptors 223 * must all fit into the 64-word Descriptor h/w Buffer 224 */ 225 if (desc_inline_query(DESC_AEAD_ENC_LEN + 226 (is_rfc3686 ? 
DESC_AEAD_CTR_RFC3686_LEN : 0), 227 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 228 ARRAY_SIZE(data_len)) < 0) 229 return -EINVAL; 230 231 if (inl_mask & 1) 232 ctx->adata.key_virt = ctx->key; 233 else 234 ctx->adata.key_dma = ctx->key_dma; 235 236 if (inl_mask & 2) 237 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 238 else 239 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 240 241 ctx->adata.key_inline = !!(inl_mask & 1); 242 ctx->cdata.key_inline = !!(inl_mask & 2); 243 244 /* aead_encrypt shared descriptor */ 245 desc = ctx->sh_desc_enc; 246 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize, 247 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off, 248 false, ctrlpriv->era); 249 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 250 desc_bytes(desc), ctx->dir); 251 252 skip_enc: 253 /* 254 * Job Descriptor and Shared Descriptors 255 * must all fit into the 64-word Descriptor h/w Buffer 256 */ 257 if (desc_inline_query(DESC_AEAD_DEC_LEN + 258 (is_rfc3686 ? 
DESC_AEAD_CTR_RFC3686_LEN : 0), 259 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 260 ARRAY_SIZE(data_len)) < 0) 261 return -EINVAL; 262 263 if (inl_mask & 1) 264 ctx->adata.key_virt = ctx->key; 265 else 266 ctx->adata.key_dma = ctx->key_dma; 267 268 if (inl_mask & 2) 269 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 270 else 271 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 272 273 ctx->adata.key_inline = !!(inl_mask & 1); 274 ctx->cdata.key_inline = !!(inl_mask & 2); 275 276 /* aead_decrypt shared descriptor */ 277 desc = ctx->sh_desc_dec; 278 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize, 279 ctx->authsize, alg->caam.geniv, is_rfc3686, 280 nonce, ctx1_iv_off, false, ctrlpriv->era); 281 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 282 desc_bytes(desc), ctx->dir); 283 284 if (!alg->caam.geniv) 285 goto skip_givenc; 286 287 /* 288 * Job Descriptor and Shared Descriptors 289 * must all fit into the 64-word Descriptor h/w Buffer 290 */ 291 if (desc_inline_query(DESC_AEAD_GIVENC_LEN + 292 (is_rfc3686 ? 
DESC_AEAD_CTR_RFC3686_LEN : 0), 293 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 294 ARRAY_SIZE(data_len)) < 0) 295 return -EINVAL; 296 297 if (inl_mask & 1) 298 ctx->adata.key_virt = ctx->key; 299 else 300 ctx->adata.key_dma = ctx->key_dma; 301 302 if (inl_mask & 2) 303 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 304 else 305 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 306 307 ctx->adata.key_inline = !!(inl_mask & 1); 308 ctx->cdata.key_inline = !!(inl_mask & 2); 309 310 /* aead_givencrypt shared descriptor */ 311 desc = ctx->sh_desc_enc; 312 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize, 313 ctx->authsize, is_rfc3686, nonce, 314 ctx1_iv_off, false, ctrlpriv->era); 315 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 316 desc_bytes(desc), ctx->dir); 317 318 skip_givenc: 319 return 0; 320 } 321 322 static int aead_setauthsize(struct crypto_aead *authenc, 323 unsigned int authsize) 324 { 325 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 326 327 ctx->authsize = authsize; 328 aead_set_sh_desc(authenc); 329 330 return 0; 331 } 332 333 static int gcm_set_sh_desc(struct crypto_aead *aead) 334 { 335 struct caam_ctx *ctx = crypto_aead_ctx(aead); 336 struct device *jrdev = ctx->jrdev; 337 unsigned int ivsize = crypto_aead_ivsize(aead); 338 u32 *desc; 339 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 340 ctx->cdata.keylen; 341 342 if (!ctx->cdata.keylen || !ctx->authsize) 343 return 0; 344 345 /* 346 * AES GCM encrypt shared descriptor 347 * Job Descriptor and Shared Descriptor 348 * must fit into the 64-word Descriptor h/w Buffer 349 */ 350 if (rem_bytes >= DESC_GCM_ENC_LEN) { 351 ctx->cdata.key_inline = true; 352 ctx->cdata.key_virt = ctx->key; 353 } else { 354 ctx->cdata.key_inline = false; 355 ctx->cdata.key_dma = ctx->key_dma; 356 } 357 358 desc = ctx->sh_desc_enc; 359 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false); 360 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 
361 desc_bytes(desc), ctx->dir); 362 363 /* 364 * Job Descriptor and Shared Descriptors 365 * must all fit into the 64-word Descriptor h/w Buffer 366 */ 367 if (rem_bytes >= DESC_GCM_DEC_LEN) { 368 ctx->cdata.key_inline = true; 369 ctx->cdata.key_virt = ctx->key; 370 } else { 371 ctx->cdata.key_inline = false; 372 ctx->cdata.key_dma = ctx->key_dma; 373 } 374 375 desc = ctx->sh_desc_dec; 376 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false); 377 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 378 desc_bytes(desc), ctx->dir); 379 380 return 0; 381 } 382 383 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize) 384 { 385 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 386 387 ctx->authsize = authsize; 388 gcm_set_sh_desc(authenc); 389 390 return 0; 391 } 392 393 static int rfc4106_set_sh_desc(struct crypto_aead *aead) 394 { 395 struct caam_ctx *ctx = crypto_aead_ctx(aead); 396 struct device *jrdev = ctx->jrdev; 397 unsigned int ivsize = crypto_aead_ivsize(aead); 398 u32 *desc; 399 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 400 ctx->cdata.keylen; 401 402 if (!ctx->cdata.keylen || !ctx->authsize) 403 return 0; 404 405 /* 406 * RFC4106 encrypt shared descriptor 407 * Job Descriptor and Shared Descriptor 408 * must fit into the 64-word Descriptor h/w Buffer 409 */ 410 if (rem_bytes >= DESC_RFC4106_ENC_LEN) { 411 ctx->cdata.key_inline = true; 412 ctx->cdata.key_virt = ctx->key; 413 } else { 414 ctx->cdata.key_inline = false; 415 ctx->cdata.key_dma = ctx->key_dma; 416 } 417 418 desc = ctx->sh_desc_enc; 419 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize, 420 false); 421 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 422 desc_bytes(desc), ctx->dir); 423 424 /* 425 * Job Descriptor and Shared Descriptors 426 * must all fit into the 64-word Descriptor h/w Buffer 427 */ 428 if (rem_bytes >= DESC_RFC4106_DEC_LEN) { 429 ctx->cdata.key_inline = true; 430 ctx->cdata.key_virt = 
ctx->key; 431 } else { 432 ctx->cdata.key_inline = false; 433 ctx->cdata.key_dma = ctx->key_dma; 434 } 435 436 desc = ctx->sh_desc_dec; 437 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 438 false); 439 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 440 desc_bytes(desc), ctx->dir); 441 442 return 0; 443 } 444 445 static int rfc4106_setauthsize(struct crypto_aead *authenc, 446 unsigned int authsize) 447 { 448 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 449 450 ctx->authsize = authsize; 451 rfc4106_set_sh_desc(authenc); 452 453 return 0; 454 } 455 456 static int rfc4543_set_sh_desc(struct crypto_aead *aead) 457 { 458 struct caam_ctx *ctx = crypto_aead_ctx(aead); 459 struct device *jrdev = ctx->jrdev; 460 unsigned int ivsize = crypto_aead_ivsize(aead); 461 u32 *desc; 462 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 463 ctx->cdata.keylen; 464 465 if (!ctx->cdata.keylen || !ctx->authsize) 466 return 0; 467 468 /* 469 * RFC4543 encrypt shared descriptor 470 * Job Descriptor and Shared Descriptor 471 * must fit into the 64-word Descriptor h/w Buffer 472 */ 473 if (rem_bytes >= DESC_RFC4543_ENC_LEN) { 474 ctx->cdata.key_inline = true; 475 ctx->cdata.key_virt = ctx->key; 476 } else { 477 ctx->cdata.key_inline = false; 478 ctx->cdata.key_dma = ctx->key_dma; 479 } 480 481 desc = ctx->sh_desc_enc; 482 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize, 483 false); 484 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 485 desc_bytes(desc), ctx->dir); 486 487 /* 488 * Job Descriptor and Shared Descriptors 489 * must all fit into the 64-word Descriptor h/w Buffer 490 */ 491 if (rem_bytes >= DESC_RFC4543_DEC_LEN) { 492 ctx->cdata.key_inline = true; 493 ctx->cdata.key_virt = ctx->key; 494 } else { 495 ctx->cdata.key_inline = false; 496 ctx->cdata.key_dma = ctx->key_dma; 497 } 498 499 desc = ctx->sh_desc_dec; 500 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 501 false); 502 
dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 503 desc_bytes(desc), ctx->dir); 504 505 return 0; 506 } 507 508 static int rfc4543_setauthsize(struct crypto_aead *authenc, 509 unsigned int authsize) 510 { 511 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 512 513 ctx->authsize = authsize; 514 rfc4543_set_sh_desc(authenc); 515 516 return 0; 517 } 518 519 static int chachapoly_set_sh_desc(struct crypto_aead *aead) 520 { 521 struct caam_ctx *ctx = crypto_aead_ctx(aead); 522 struct device *jrdev = ctx->jrdev; 523 unsigned int ivsize = crypto_aead_ivsize(aead); 524 u32 *desc; 525 526 if (!ctx->cdata.keylen || !ctx->authsize) 527 return 0; 528 529 desc = ctx->sh_desc_enc; 530 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, 531 ctx->authsize, true, false); 532 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 533 desc_bytes(desc), ctx->dir); 534 535 desc = ctx->sh_desc_dec; 536 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, 537 ctx->authsize, false, false); 538 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 539 desc_bytes(desc), ctx->dir); 540 541 return 0; 542 } 543 544 static int chachapoly_setauthsize(struct crypto_aead *aead, 545 unsigned int authsize) 546 { 547 struct caam_ctx *ctx = crypto_aead_ctx(aead); 548 549 if (authsize != POLY1305_DIGEST_SIZE) 550 return -EINVAL; 551 552 ctx->authsize = authsize; 553 return chachapoly_set_sh_desc(aead); 554 } 555 556 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key, 557 unsigned int keylen) 558 { 559 struct caam_ctx *ctx = crypto_aead_ctx(aead); 560 unsigned int ivsize = crypto_aead_ivsize(aead); 561 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize; 562 563 if (keylen != CHACHA_KEY_SIZE + saltlen) { 564 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 565 return -EINVAL; 566 } 567 568 ctx->cdata.key_virt = key; 569 ctx->cdata.keylen = keylen - saltlen; 570 571 return chachapoly_set_sh_desc(aead); 572 } 573 574 static int aead_setkey(struct 
crypto_aead *aead, 575 const u8 *key, unsigned int keylen) 576 { 577 struct caam_ctx *ctx = crypto_aead_ctx(aead); 578 struct device *jrdev = ctx->jrdev; 579 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 580 struct crypto_authenc_keys keys; 581 int ret = 0; 582 583 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) 584 goto badkey; 585 586 #ifdef DEBUG 587 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n", 588 keys.authkeylen + keys.enckeylen, keys.enckeylen, 589 keys.authkeylen); 590 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 591 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 592 #endif 593 594 /* 595 * If DKP is supported, use it in the shared descriptor to generate 596 * the split key. 597 */ 598 if (ctrlpriv->era >= 6) { 599 ctx->adata.keylen = keys.authkeylen; 600 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype & 601 OP_ALG_ALGSEL_MASK); 602 603 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE) 604 goto badkey; 605 606 memcpy(ctx->key, keys.authkey, keys.authkeylen); 607 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, 608 keys.enckeylen); 609 dma_sync_single_for_device(jrdev, ctx->key_dma, 610 ctx->adata.keylen_pad + 611 keys.enckeylen, ctx->dir); 612 goto skip_split_key; 613 } 614 615 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, 616 keys.authkeylen, CAAM_MAX_KEY_SIZE - 617 keys.enckeylen); 618 if (ret) { 619 goto badkey; 620 } 621 622 /* postpend encryption key to auth split key */ 623 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen); 624 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + 625 keys.enckeylen, ctx->dir); 626 #ifdef DEBUG 627 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ", 628 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, 629 ctx->adata.keylen_pad + keys.enckeylen, 1); 630 #endif 631 632 skip_split_key: 633 ctx->cdata.keylen = keys.enckeylen; 634 memzero_explicit(&keys, sizeof(keys)); 635 return 
aead_set_sh_desc(aead); 636 badkey: 637 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 638 memzero_explicit(&keys, sizeof(keys)); 639 return -EINVAL; 640 } 641 642 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key, 643 unsigned int keylen) 644 { 645 struct crypto_authenc_keys keys; 646 u32 flags; 647 int err; 648 649 err = crypto_authenc_extractkeys(&keys, key, keylen); 650 if (unlikely(err)) 651 goto badkey; 652 653 err = -EINVAL; 654 if (keys.enckeylen != DES3_EDE_KEY_SIZE) 655 goto badkey; 656 657 flags = crypto_aead_get_flags(aead); 658 err = __des3_verify_key(&flags, keys.enckey); 659 if (unlikely(err)) { 660 crypto_aead_set_flags(aead, flags); 661 goto out; 662 } 663 664 err = aead_setkey(aead, key, keylen); 665 666 out: 667 memzero_explicit(&keys, sizeof(keys)); 668 return err; 669 670 badkey: 671 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 672 goto out; 673 } 674 675 static int gcm_setkey(struct crypto_aead *aead, 676 const u8 *key, unsigned int keylen) 677 { 678 struct caam_ctx *ctx = crypto_aead_ctx(aead); 679 struct device *jrdev = ctx->jrdev; 680 681 #ifdef DEBUG 682 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 683 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 684 #endif 685 686 memcpy(ctx->key, key, keylen); 687 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir); 688 ctx->cdata.keylen = keylen; 689 690 return gcm_set_sh_desc(aead); 691 } 692 693 static int rfc4106_setkey(struct crypto_aead *aead, 694 const u8 *key, unsigned int keylen) 695 { 696 struct caam_ctx *ctx = crypto_aead_ctx(aead); 697 struct device *jrdev = ctx->jrdev; 698 699 if (keylen < 4) 700 return -EINVAL; 701 702 #ifdef DEBUG 703 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 704 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 705 #endif 706 707 memcpy(ctx->key, key, keylen); 708 709 /* 710 * The last four bytes of the key material are used as the salt value 711 * in the nonce. 
Update the AES key length. 712 */ 713 ctx->cdata.keylen = keylen - 4; 714 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 715 ctx->dir); 716 return rfc4106_set_sh_desc(aead); 717 } 718 719 static int rfc4543_setkey(struct crypto_aead *aead, 720 const u8 *key, unsigned int keylen) 721 { 722 struct caam_ctx *ctx = crypto_aead_ctx(aead); 723 struct device *jrdev = ctx->jrdev; 724 725 if (keylen < 4) 726 return -EINVAL; 727 728 #ifdef DEBUG 729 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 730 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 731 #endif 732 733 memcpy(ctx->key, key, keylen); 734 735 /* 736 * The last four bytes of the key material are used as the salt value 737 * in the nonce. Update the AES key length. 738 */ 739 ctx->cdata.keylen = keylen - 4; 740 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 741 ctx->dir); 742 return rfc4543_set_sh_desc(aead); 743 } 744 745 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key, 746 unsigned int keylen) 747 { 748 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 749 struct caam_skcipher_alg *alg = 750 container_of(crypto_skcipher_alg(skcipher), typeof(*alg), 751 skcipher); 752 struct device *jrdev = ctx->jrdev; 753 unsigned int ivsize = crypto_skcipher_ivsize(skcipher); 754 u32 *desc; 755 u32 ctx1_iv_off = 0; 756 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 757 OP_ALG_AAI_CTR_MOD128); 758 const bool is_rfc3686 = alg->caam.rfc3686; 759 760 #ifdef DEBUG 761 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 762 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 763 #endif 764 /* 765 * AES-CTR needs to load IV in CONTEXT1 reg 766 * at an offset of 128bits (16bytes) 767 * CONTEXT1[255:128] = IV 768 */ 769 if (ctr_mode) 770 ctx1_iv_off = 16; 771 772 /* 773 * RFC3686 specific: 774 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER} 775 * | *key = {KEY, NONCE} 776 */ 777 if (is_rfc3686) { 778 ctx1_iv_off = 16 + 
CTR_RFC3686_NONCE_SIZE; 779 keylen -= CTR_RFC3686_NONCE_SIZE; 780 } 781 782 ctx->cdata.keylen = keylen; 783 ctx->cdata.key_virt = key; 784 ctx->cdata.key_inline = true; 785 786 /* skcipher_encrypt shared descriptor */ 787 desc = ctx->sh_desc_enc; 788 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686, 789 ctx1_iv_off); 790 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 791 desc_bytes(desc), ctx->dir); 792 793 /* skcipher_decrypt shared descriptor */ 794 desc = ctx->sh_desc_dec; 795 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686, 796 ctx1_iv_off); 797 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 798 desc_bytes(desc), ctx->dir); 799 800 return 0; 801 } 802 803 static int des_skcipher_setkey(struct crypto_skcipher *skcipher, 804 const u8 *key, unsigned int keylen) 805 { 806 u32 tmp[DES3_EDE_EXPKEY_WORDS]; 807 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 808 809 if (keylen == DES3_EDE_KEY_SIZE && 810 __des3_ede_setkey(tmp, &tfm->crt_flags, key, DES3_EDE_KEY_SIZE)) { 811 return -EINVAL; 812 } 813 814 if (!des_ekey(tmp, key) && (crypto_skcipher_get_flags(skcipher) & 815 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) { 816 crypto_skcipher_set_flags(skcipher, 817 CRYPTO_TFM_RES_WEAK_KEY); 818 return -EINVAL; 819 } 820 821 return skcipher_setkey(skcipher, key, keylen); 822 } 823 824 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key, 825 unsigned int keylen) 826 { 827 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 828 struct device *jrdev = ctx->jrdev; 829 u32 *desc; 830 831 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) { 832 crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN); 833 dev_err(jrdev, "key size mismatch\n"); 834 return -EINVAL; 835 } 836 837 ctx->cdata.keylen = keylen; 838 ctx->cdata.key_virt = key; 839 ctx->cdata.key_inline = true; 840 841 /* xts_skcipher_encrypt shared descriptor */ 842 desc = ctx->sh_desc_enc; 843 
cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata); 844 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 845 desc_bytes(desc), ctx->dir); 846 847 /* xts_skcipher_decrypt shared descriptor */ 848 desc = ctx->sh_desc_dec; 849 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata); 850 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 851 desc_bytes(desc), ctx->dir); 852 853 return 0; 854 } 855 856 /* 857 * aead_edesc - s/w-extended aead descriptor 858 * @src_nents: number of segments in input s/w scatterlist 859 * @dst_nents: number of segments in output s/w scatterlist 860 * @mapped_src_nents: number of segments in input h/w link table 861 * @mapped_dst_nents: number of segments in output h/w link table 862 * @sec4_sg_bytes: length of dma mapped sec4_sg space 863 * @sec4_sg_dma: bus physical mapped address of h/w link table 864 * @sec4_sg: pointer to h/w link table 865 * @hw_desc: the h/w job descriptor followed by any referenced link tables 866 */ 867 struct aead_edesc { 868 int src_nents; 869 int dst_nents; 870 int mapped_src_nents; 871 int mapped_dst_nents; 872 int sec4_sg_bytes; 873 dma_addr_t sec4_sg_dma; 874 struct sec4_sg_entry *sec4_sg; 875 u32 hw_desc[]; 876 }; 877 878 /* 879 * skcipher_edesc - s/w-extended skcipher descriptor 880 * @src_nents: number of segments in input s/w scatterlist 881 * @dst_nents: number of segments in output s/w scatterlist 882 * @mapped_src_nents: number of segments in input h/w link table 883 * @mapped_dst_nents: number of segments in output h/w link table 884 * @iv_dma: dma address of iv for checking continuity and link table 885 * @sec4_sg_bytes: length of dma mapped sec4_sg space 886 * @sec4_sg_dma: bus physical mapped address of h/w link table 887 * @sec4_sg: pointer to h/w link table 888 * @hw_desc: the h/w job descriptor followed by any referenced link tables 889 * and IV 890 */ 891 struct skcipher_edesc { 892 int src_nents; 893 int dst_nents; 894 int mapped_src_nents; 895 int mapped_dst_nents; 896 dma_addr_t iv_dma; 
897 int sec4_sg_bytes; 898 dma_addr_t sec4_sg_dma; 899 struct sec4_sg_entry *sec4_sg; 900 u32 hw_desc[0]; 901 }; 902 903 static void caam_unmap(struct device *dev, struct scatterlist *src, 904 struct scatterlist *dst, int src_nents, 905 int dst_nents, 906 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma, 907 int sec4_sg_bytes) 908 { 909 if (dst != src) { 910 if (src_nents) 911 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); 912 if (dst_nents) 913 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); 914 } else { 915 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); 916 } 917 918 if (iv_dma) 919 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE); 920 if (sec4_sg_bytes) 921 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes, 922 DMA_TO_DEVICE); 923 } 924 925 static void aead_unmap(struct device *dev, 926 struct aead_edesc *edesc, 927 struct aead_request *req) 928 { 929 caam_unmap(dev, req->src, req->dst, 930 edesc->src_nents, edesc->dst_nents, 0, 0, 931 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 932 } 933 934 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, 935 struct skcipher_request *req) 936 { 937 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 938 int ivsize = crypto_skcipher_ivsize(skcipher); 939 940 caam_unmap(dev, req->src, req->dst, 941 edesc->src_nents, edesc->dst_nents, 942 edesc->iv_dma, ivsize, 943 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 944 } 945 946 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 947 void *context) 948 { 949 struct aead_request *req = context; 950 struct aead_edesc *edesc; 951 952 #ifdef DEBUG 953 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 954 #endif 955 956 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 957 958 if (err) 959 caam_jr_strstatus(jrdev, err); 960 961 aead_unmap(jrdev, edesc, req); 962 963 kfree(edesc); 964 965 aead_request_complete(req, err); 966 } 967 968 static void aead_decrypt_done(struct device *jrdev, u32 
*desc, u32 err, 969 void *context) 970 { 971 struct aead_request *req = context; 972 struct aead_edesc *edesc; 973 974 #ifdef DEBUG 975 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 976 #endif 977 978 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 979 980 if (err) 981 caam_jr_strstatus(jrdev, err); 982 983 aead_unmap(jrdev, edesc, req); 984 985 /* 986 * verify hw auth check passed else return -EBADMSG 987 */ 988 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK) 989 err = -EBADMSG; 990 991 kfree(edesc); 992 993 aead_request_complete(req, err); 994 } 995 996 static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 997 void *context) 998 { 999 struct skcipher_request *req = context; 1000 struct skcipher_edesc *edesc; 1001 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1002 int ivsize = crypto_skcipher_ivsize(skcipher); 1003 1004 #ifdef DEBUG 1005 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 1006 #endif 1007 1008 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]); 1009 1010 if (err) 1011 caam_jr_strstatus(jrdev, err); 1012 1013 #ifdef DEBUG 1014 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ", 1015 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, 1016 edesc->src_nents > 1 ? 100 : ivsize, 1); 1017 #endif 1018 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ", 1019 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 1020 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); 1021 1022 skcipher_unmap(jrdev, edesc, req); 1023 1024 /* 1025 * The crypto API expects us to set the IV (req->iv) to the last 1026 * ciphertext block. This is used e.g. by the CTS mode. 
 */
	if (ivsize)
		scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen -
					 ivsize, ivsize, 0);

	kfree(edesc);

	skcipher_request_complete(req, err);
}

/*
 * Job-ring completion callback for skcipher decryption: translate the HW
 * status, unmap the extended descriptor's DMA resources and complete the
 * crypto API request.
 */
static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
				  void *context)
{
	struct skcipher_request *req = context;
	struct skcipher_edesc *edesc;
#ifdef DEBUG
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	int ivsize = crypto_skcipher_ivsize(skcipher);

	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
#endif

	/* Recover the extended descriptor embedding this HW descriptor */
	edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]);
	if (err)
		caam_jr_strstatus(jrdev, err);

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
#endif
	caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);

	skcipher_unmap(jrdev, edesc, req);
	kfree(edesc);

	skcipher_request_complete(req, err);
}

/*
 * Fill in aead job descriptor
 */
static void init_aead_job(struct aead_request *req,
			  struct aead_edesc *edesc,
			  bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	int authsize = ctx->authsize;
	u32 *desc = edesc->hw_desc;
	u32 out_options, in_options;
	dma_addr_t dst_dma, src_dma;
	int len, sec4_sg_index = 0;
	dma_addr_t ptr;
	u32 *sh_desc;

	/* Point the job at the direction-specific shared descriptor */
	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	if (all_contig) {
		/* mapped_src_nents == 0 covers zero-length input */
		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
						    0;
		in_options = 0;
	} else {
		/* Multi-segment input: point at the HW S/G table */
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index += edesc->mapped_src_nents;
		in_options = LDST_SGF;
	}

	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
			  in_options);

	/* Default to in-place operation: output mirrors input */
	dst_dma = src_dma;
	out_options = in_options;

	if (unlikely(req->src != req->dst)) {
		if (!edesc->mapped_dst_nents) {
			dst_dma = 0;
		} else if (edesc->mapped_dst_nents == 1) {
			dst_dma = sg_dma_address(req->dst);
			out_options = 0;
		} else {
			/* dst S/G entries follow the src ones in the table */
			dst_dma = edesc->sec4_sg_dma +
				  sec4_sg_index *
				  sizeof(struct sec4_sg_entry);
			out_options = LDST_SGF;
		}
	}

	/* Encrypt output grows by the ICV; decrypt output shrinks by it */
	if (encrypt)
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen + authsize,
				   out_options);
	else
		append_seq_out_ptr(desc, dst_dma,
				   req->assoclen + req->cryptlen - authsize,
				   out_options);
}

/*
 * Fill in a GCM job descriptor; covers both generic GCM (12-byte IV)
 * and the salted (non-generic) variants, which append a 4-byte salt
 * taken from the end of the key material.
 */
static void init_gcm_job(struct aead_request *req,
			 struct aead_edesc *edesc,
			 bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	u32 *desc = edesc->hw_desc;
	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
	unsigned int last;

	init_aead_job(req, edesc, all_contig, encrypt);
	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);

	/* BUG This should not be specific to generic GCM. */
	last = 0;
	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
		last = FIFOLD_TYPE_LAST1;

	/* Read GCM IV */
	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
		   FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
	/* Append Salt */
	if (!generic_gcm)
		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
	/* Append IV */
	append_data(desc, req->iv, ivsize);
	/* End of blank commands */
}

/*
 * Fill in a ChaCha20-Poly1305 job descriptor (RFC7539 12-byte IV, or the
 * IPsec variant where the IV travels with the associated data).
 */
static void init_chachapoly_job(struct aead_request *req,
				struct aead_edesc *edesc, bool all_contig,
				bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int assoclen = req->assoclen;
	u32 *desc = edesc->hw_desc;
	u32 ctx_iv_off = 4;

	init_aead_job(req, edesc, all_contig, encrypt);

	if (ivsize != CHACHAPOLY_IV_SIZE) {
		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
		ctx_iv_off += 4;

		/*
		 * The associated data comes already with the IV but we need
		 * to skip it when we authenticate or encrypt...
		 */
		assoclen -= ivsize;
	}

	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);

	/*
	 * For IPsec load the IV further in the same register.
	 * For RFC7539 simply load the 12 bytes nonce in a single operation
	 */
	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
			   LDST_SRCDST_BYTE_CONTEXT |
			   ctx_iv_off << LDST_OFFSET_SHIFT);
}

/*
 * Fill in an authenc (cipher + auth) job descriptor. The IV landing spot
 * inside CONTEXT1 depends on the cipher mode (plain, CTR, RFC3686).
 */
static void init_authenc_job(struct aead_request *req,
			     struct aead_edesc *edesc,
			     bool all_contig, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
						 struct caam_aead_alg, aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
			       OP_ALG_AAI_CTR_MOD128);
	const bool is_rfc3686 = alg->caam.rfc3686;
	u32 *desc = edesc->hw_desc;
	u32 ivoffset = 0;

	/*
	 * AES-CTR needs to load IV in CONTEXT1 reg
	 * at an offset of 128bits (16bytes)
	 * CONTEXT1[255:128] = IV
	 */
	if (ctr_mode)
		ivoffset = 16;

	/*
	 * RFC3686 specific:
	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
	 */
	if (is_rfc3686)
		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;

	init_aead_job(req, edesc, all_contig, encrypt);

	/*
	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
	 * having DPOVRD as destination.
	 */
	if (ctrlpriv->era < 3)
		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
	else
		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);

	/* geniv decrypt gets its IV via the input data, not as immediate */
	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
		append_load_as_imm(desc, req->iv, ivsize,
				   LDST_CLASS_1_CCB |
				   LDST_SRCDST_BYTE_CONTEXT |
				   (ivoffset << LDST_OFFSET_SHIFT));
}

/*
 * Fill in skcipher job descriptor
 */
static void init_skcipher_job(struct skcipher_request *req,
			      struct skcipher_edesc *edesc,
			      const bool encrypt)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	u32 *desc = edesc->hw_desc;
	u32 *sh_desc;
	u32 in_options = 0, out_options = 0;
	dma_addr_t src_dma, dst_dma, ptr;
	int len, sec4_sg_index = 0;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
	pr_err("asked=%d, cryptlen%d\n",
	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
#endif
	caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);

	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
	ptr = encrypt ?
		ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;

	len = desc_len(sh_desc);
	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);

	/* An input HW S/G table is used if there is an IV and/or >1 segment */
	if (ivsize || edesc->mapped_src_nents > 1) {
		src_dma = edesc->sec4_sg_dma;
		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
		in_options = LDST_SGF;
	} else {
		src_dma = sg_dma_address(req->src);
	}

	/* Input length includes the prepended IV, if any */
	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);

	if (likely(req->src == req->dst)) {
		/* In-place: output follows input, skipping the IV entry */
		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
		out_options = in_options;
	} else if (edesc->mapped_dst_nents == 1) {
		dst_dma = sg_dma_address(req->dst);
	} else {
		/* dst entries start right after the input part of the table */
		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
			  sizeof(struct sec4_sg_entry);
		out_options = LDST_SGF;
	}

	append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options);
}

/*
 * allocate and map the aead extended descriptor
 */
static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
					   int desc_bytes, bool *all_contig_ptr,
					   bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	/* Only sleep in allocations if the caller allows it */
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct aead_edesc *edesc;
	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
	unsigned int authsize = ctx->authsize;

	if (unlikely(req->dst != req->src)) {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen);
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen);
			return ERR_PTR(src_nents);
		}

		/* dst also holds (encrypt) or drops (decrypt) the ICV */
		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize :
							(-authsize)));
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : (-authsize)));
			return ERR_PTR(dst_nents);
		}
	} else {
		src_nents = sg_nents_for_len(req->src, req->assoclen +
					     req->cryptlen +
					     (encrypt ? authsize : 0));
		if (unlikely(src_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
				req->assoclen + req->cryptlen +
				(encrypt ? authsize : 0));
			return ERR_PTR(src_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		/* In-place operation: one bidirectional mapping suffices */
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		/* Cover also the case of null (zero length) input data */
		if (src_nents) {
			mapped_src_nents = dma_map_sg(jrdev, req->src,
						      src_nents, DMA_TO_DEVICE);
			if (unlikely(!mapped_src_nents)) {
				dev_err(jrdev, "unable to map source\n");
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_src_nents = 0;
		}

		/* Cover also the case of null (zero length) output data */
		if (dst_nents) {
			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
						      dst_nents,
						      DMA_FROM_DEVICE);
			if (unlikely(!mapped_dst_nents)) {
				dev_err(jrdev, "unable to map destination\n");
				dma_unmap_sg(jrdev, req->src, src_nents,
					     DMA_TO_DEVICE);
				return ERR_PTR(-ENOMEM);
			}
		} else {
			mapped_dst_nents = 0;
		}
	}

	/* HW S/G table entries are only needed for multi-segment sides */
	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);

	/* allocate space for base edesc and hw desc commands, link tables */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
			GFP_DMA | flags);
	if (!edesc) {
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	/* The S/G table lives right after the HW descriptor commands */
	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
			 desc_bytes;
	*all_contig_ptr = !(mapped_src_nents > 1);

	sec4_sg_index = 0;
	if (mapped_src_nents > 1) {
		sg_to_sec4_sg_last(req->src, mapped_src_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
		sec4_sg_index += mapped_src_nents;
	}
	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + sec4_sg_index, 0);
	}

	if (!sec4_sg_bytes)
		return edesc;

	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
					    sec4_sg_bytes, DMA_TO_DEVICE);
	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
		dev_err(jrdev, "unable to map S/G table\n");
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
		return ERR_PTR(-ENOMEM);
	}

	/*
	 * NOTE(review): set only after the table is successfully mapped —
	 * presumably so the unmap path skips an unmapped table; confirm
	 * against aead_unmap (not visible here).
	 */
	edesc->sec4_sg_bytes = sec4_sg_bytes;

	return edesc;
}

/* GCM encrypt entry point: build the job descriptor and enqueue it */
static int gcm_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_gcm_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	/* On success the completion callback owns and frees edesc */
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* ChaCha20-Poly1305 encrypt entry point */
static int chachapoly_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, true);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* ChaCha20-Poly1305 decrypt entry point */
static int chachapoly_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret;

	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
				 false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	desc = edesc->hw_desc;

	init_chachapoly_job(req, edesc, all_contig, false);
	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
			     1);

	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* IPsec GCM variants require at least 8 bytes of associated data */
static int ipsec_gcm_encrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_encrypt(req);
}

/* authenc encrypt entry point */
static int aead_encrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor */
	init_authenc_job(req, edesc, all_contig, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* GCM decrypt entry point */
static int gcm_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_gcm_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* IPsec GCM variants require at least 8 bytes of associated data */
static int ipsec_gcm_decrypt(struct aead_request *req)
{
	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_decrypt(req);
}

/* authenc decrypt entry point */
static int aead_decrypt(struct aead_request *req)
{
	struct aead_edesc *edesc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct caam_ctx *ctx = crypto_aead_ctx(aead);
	struct device *jrdev = ctx->jrdev;
	bool all_contig;
	u32 *desc;
	int ret = 0;

	caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
		     req->assoclen + req->cryptlen, 1);

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
				 &all_contig, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_authenc_job(req, edesc, all_contig, false);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	desc = edesc->hw_desc;
	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		aead_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/*
 * allocate and map the skcipher extended descriptor for skcipher
 */
static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
						   int desc_bytes)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	/* Only sleep in allocations if the caller allows it */
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		       GFP_KERNEL : GFP_ATOMIC;
	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
	struct skcipher_edesc *edesc;
	dma_addr_t iv_dma = 0;
	u8 *iv;
	int ivsize = crypto_skcipher_ivsize(skcipher);
	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;

	src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (unlikely(src_nents < 0)) {
		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
			req->cryptlen);
		return ERR_PTR(src_nents);
	}

	if (req->dst != req->src) {
		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
		if (unlikely(dst_nents < 0)) {
			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
				req->cryptlen);
			return ERR_PTR(dst_nents);
		}
	}

	if (likely(req->src == req->dst)) {
		/* In-place operation: one bidirectional mapping suffices */
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_BIDIRECTIONAL);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
	} else {
		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
					      DMA_TO_DEVICE);
		if (unlikely(!mapped_src_nents)) {
			dev_err(jrdev, "unable to map source\n");
			return ERR_PTR(-ENOMEM);
		}
		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
					      DMA_FROM_DEVICE);
		if (unlikely(!mapped_dst_nents)) {
			dev_err(jrdev, "unable to map destination\n");
			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
			return ERR_PTR(-ENOMEM);
		}
	}

	/* Input table holds the IV entry (if any) plus the src segments */
	if (!ivsize && mapped_src_nents == 1)
		sec4_sg_ents = 0; // no need for an input hw s/g table
	else
		sec4_sg_ents = mapped_src_nents + !!ivsize;
	dst_sg_idx = sec4_sg_ents;
	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);

	/*
	 * allocate space for base edesc and hw desc commands, link tables, IV
	 */
	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
			GFP_DMA | flags);
	if (!edesc) {
		dev_err(jrdev, "could not allocate extended descriptor\n");
		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
			   0, 0, 0);
		return ERR_PTR(-ENOMEM);
	}

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->mapped_src_nents = mapped_src_nents;
	edesc->mapped_dst_nents = mapped_dst_nents;
	edesc->sec4_sg_bytes = sec4_sg_bytes;
	/* The S/G table lives right after the HW descriptor commands */
	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
						  desc_bytes);

	/* Make sure IV is located in a DMAable area */
	if (ivsize) {
		iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes;
		memcpy(iv, req->iv, ivsize);

		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, iv_dma)) {
			dev_err(jrdev, "unable to map IV\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, 0, 0, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}

		/* IV occupies the first entry of the input S/G table */
		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
	}
	if (dst_sg_idx)
		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg +
				   !!ivsize, 0);

	if (mapped_dst_nents > 1) {
		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
				   edesc->sec4_sg + dst_sg_idx, 0);
	}

	if (sec4_sg_bytes) {
		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
						    sec4_sg_bytes,
						    DMA_TO_DEVICE);
		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
			dev_err(jrdev, "unable to map S/G table\n");
			caam_unmap(jrdev, req->src, req->dst, src_nents,
				   dst_nents, iv_dma, ivsize, 0, 0);
			kfree(edesc);
			return ERR_PTR(-ENOMEM);
		}
	}

	edesc->iv_dma = iv_dma;

#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
		       sec4_sg_bytes, 1);
#endif

	return edesc;
}

/* skcipher encrypt entry point: build the job descriptor and enqueue it */
static int skcipher_encrypt(struct skcipher_request *req)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* Create and submit job descriptor*/
	init_skcipher_job(req, edesc, true);
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif
	desc = edesc->hw_desc;
	/* On success the completion callback owns and frees edesc */
	ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req);

	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* skcipher decrypt entry point */
static int skcipher_decrypt(struct skcipher_request *req)
{
	struct skcipher_edesc *edesc;
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
	int ivsize = crypto_skcipher_ivsize(skcipher);
	struct device *jrdev = ctx->jrdev;
	u32 *desc;
	int ret = 0;

	/* allocate extended descriptor */
	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/*
	 * The crypto API expects us to set the IV (req->iv) to the last
	 * ciphertext block.
	 */
	if (ivsize)
		scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen -
					 ivsize, ivsize, 0);

	/* Create and submit job descriptor*/
	init_skcipher_job(req, edesc, false);
	desc = edesc->hw_desc;
#ifdef DEBUG
	print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ",
		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
		       desc_bytes(edesc->hw_desc), 1);
#endif

	/* On success the completion callback owns and frees edesc */
	ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req);
	if (!ret) {
		ret = -EINPROGRESS;
	} else {
		skcipher_unmap(jrdev, edesc, req);
		kfree(edesc);
	}

	return ret;
}

/* skcipher algorithm templates registered with the crypto API */
static struct caam_skcipher_alg driver_algs[] = {
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(aes)",
				.cra_driver_name = "cbc-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des3_ede)",
				.cra_driver_name = "cbc-3des-caam",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "cbc(des)",
				.cra_driver_name = "cbc-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ctr(aes)",
				.cra_driver_name = "ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
					OP_ALG_AAI_CTR_MOD128,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "rfc3686(ctr(aes))",
				.cra_driver_name = "rfc3686-ctr-aes-caam",
				.cra_blocksize = 1,
			},
			.setkey = skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			/* key carries a trailing 4-byte RFC3686 nonce */
			.min_keysize = AES_MIN_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE +
				       CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.chunksize = AES_BLOCK_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_AES |
					   OP_ALG_AAI_CTR_MOD128,
			.rfc3686 = true,
		},
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "xts(aes)",
				.cra_driver_name = "xts-aes-caam",
				.cra_blocksize = AES_BLOCK_SIZE,
			},
			.setkey = xts_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			/* XTS uses two full-length AES keys */
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
	},
	{
		.skcipher = {
			.base = {
				.cra_name = "ecb(des)",
				.cra_driver_name = "ecb-des-caam",
				.cra_blocksize = DES_BLOCK_SIZE,
			},
			.setkey = des_skcipher_setkey,
			.encrypt = skcipher_encrypt,
			.decrypt = skcipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
		},
		.caam.class1_alg_type = OP_ALG_ALGSEL_DES |
OP_ALG_AAI_ECB, 1991 }, 1992 { 1993 .skcipher = { 1994 .base = { 1995 .cra_name = "ecb(aes)", 1996 .cra_driver_name = "ecb-aes-caam", 1997 .cra_blocksize = AES_BLOCK_SIZE, 1998 }, 1999 .setkey = skcipher_setkey, 2000 .encrypt = skcipher_encrypt, 2001 .decrypt = skcipher_decrypt, 2002 .min_keysize = AES_MIN_KEY_SIZE, 2003 .max_keysize = AES_MAX_KEY_SIZE, 2004 }, 2005 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB, 2006 }, 2007 { 2008 .skcipher = { 2009 .base = { 2010 .cra_name = "ecb(des3_ede)", 2011 .cra_driver_name = "ecb-des3-caam", 2012 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2013 }, 2014 .setkey = des_skcipher_setkey, 2015 .encrypt = skcipher_encrypt, 2016 .decrypt = skcipher_decrypt, 2017 .min_keysize = DES3_EDE_KEY_SIZE, 2018 .max_keysize = DES3_EDE_KEY_SIZE, 2019 }, 2020 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB, 2021 }, 2022 { 2023 .skcipher = { 2024 .base = { 2025 .cra_name = "ecb(arc4)", 2026 .cra_driver_name = "ecb-arc4-caam", 2027 .cra_blocksize = ARC4_BLOCK_SIZE, 2028 }, 2029 .setkey = skcipher_setkey, 2030 .encrypt = skcipher_encrypt, 2031 .decrypt = skcipher_decrypt, 2032 .min_keysize = ARC4_MIN_KEY_SIZE, 2033 .max_keysize = ARC4_MAX_KEY_SIZE, 2034 }, 2035 .caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB, 2036 }, 2037 }; 2038 2039 static struct caam_aead_alg driver_aeads[] = { 2040 { 2041 .aead = { 2042 .base = { 2043 .cra_name = "rfc4106(gcm(aes))", 2044 .cra_driver_name = "rfc4106-gcm-aes-caam", 2045 .cra_blocksize = 1, 2046 }, 2047 .setkey = rfc4106_setkey, 2048 .setauthsize = rfc4106_setauthsize, 2049 .encrypt = ipsec_gcm_encrypt, 2050 .decrypt = ipsec_gcm_decrypt, 2051 .ivsize = GCM_RFC4106_IV_SIZE, 2052 .maxauthsize = AES_BLOCK_SIZE, 2053 }, 2054 .caam = { 2055 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2056 .nodkp = true, 2057 }, 2058 }, 2059 { 2060 .aead = { 2061 .base = { 2062 .cra_name = "rfc4543(gcm(aes))", 2063 .cra_driver_name = "rfc4543-gcm-aes-caam", 2064 .cra_blocksize = 1, 2065 }, 
2066 .setkey = rfc4543_setkey, 2067 .setauthsize = rfc4543_setauthsize, 2068 .encrypt = ipsec_gcm_encrypt, 2069 .decrypt = ipsec_gcm_decrypt, 2070 .ivsize = GCM_RFC4543_IV_SIZE, 2071 .maxauthsize = AES_BLOCK_SIZE, 2072 }, 2073 .caam = { 2074 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2075 .nodkp = true, 2076 }, 2077 }, 2078 /* Galois Counter Mode */ 2079 { 2080 .aead = { 2081 .base = { 2082 .cra_name = "gcm(aes)", 2083 .cra_driver_name = "gcm-aes-caam", 2084 .cra_blocksize = 1, 2085 }, 2086 .setkey = gcm_setkey, 2087 .setauthsize = gcm_setauthsize, 2088 .encrypt = gcm_encrypt, 2089 .decrypt = gcm_decrypt, 2090 .ivsize = GCM_AES_IV_SIZE, 2091 .maxauthsize = AES_BLOCK_SIZE, 2092 }, 2093 .caam = { 2094 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2095 .nodkp = true, 2096 }, 2097 }, 2098 /* single-pass ipsec_esp descriptor */ 2099 { 2100 .aead = { 2101 .base = { 2102 .cra_name = "authenc(hmac(md5)," 2103 "ecb(cipher_null))", 2104 .cra_driver_name = "authenc-hmac-md5-" 2105 "ecb-cipher_null-caam", 2106 .cra_blocksize = NULL_BLOCK_SIZE, 2107 }, 2108 .setkey = aead_setkey, 2109 .setauthsize = aead_setauthsize, 2110 .encrypt = aead_encrypt, 2111 .decrypt = aead_decrypt, 2112 .ivsize = NULL_IV_SIZE, 2113 .maxauthsize = MD5_DIGEST_SIZE, 2114 }, 2115 .caam = { 2116 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2117 OP_ALG_AAI_HMAC_PRECOMP, 2118 }, 2119 }, 2120 { 2121 .aead = { 2122 .base = { 2123 .cra_name = "authenc(hmac(sha1)," 2124 "ecb(cipher_null))", 2125 .cra_driver_name = "authenc-hmac-sha1-" 2126 "ecb-cipher_null-caam", 2127 .cra_blocksize = NULL_BLOCK_SIZE, 2128 }, 2129 .setkey = aead_setkey, 2130 .setauthsize = aead_setauthsize, 2131 .encrypt = aead_encrypt, 2132 .decrypt = aead_decrypt, 2133 .ivsize = NULL_IV_SIZE, 2134 .maxauthsize = SHA1_DIGEST_SIZE, 2135 }, 2136 .caam = { 2137 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2138 OP_ALG_AAI_HMAC_PRECOMP, 2139 }, 2140 }, 2141 { 2142 .aead = { 2143 .base = { 2144 .cra_name = "authenc(hmac(sha224)," 2145 
"ecb(cipher_null))", 2146 .cra_driver_name = "authenc-hmac-sha224-" 2147 "ecb-cipher_null-caam", 2148 .cra_blocksize = NULL_BLOCK_SIZE, 2149 }, 2150 .setkey = aead_setkey, 2151 .setauthsize = aead_setauthsize, 2152 .encrypt = aead_encrypt, 2153 .decrypt = aead_decrypt, 2154 .ivsize = NULL_IV_SIZE, 2155 .maxauthsize = SHA224_DIGEST_SIZE, 2156 }, 2157 .caam = { 2158 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2159 OP_ALG_AAI_HMAC_PRECOMP, 2160 }, 2161 }, 2162 { 2163 .aead = { 2164 .base = { 2165 .cra_name = "authenc(hmac(sha256)," 2166 "ecb(cipher_null))", 2167 .cra_driver_name = "authenc-hmac-sha256-" 2168 "ecb-cipher_null-caam", 2169 .cra_blocksize = NULL_BLOCK_SIZE, 2170 }, 2171 .setkey = aead_setkey, 2172 .setauthsize = aead_setauthsize, 2173 .encrypt = aead_encrypt, 2174 .decrypt = aead_decrypt, 2175 .ivsize = NULL_IV_SIZE, 2176 .maxauthsize = SHA256_DIGEST_SIZE, 2177 }, 2178 .caam = { 2179 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2180 OP_ALG_AAI_HMAC_PRECOMP, 2181 }, 2182 }, 2183 { 2184 .aead = { 2185 .base = { 2186 .cra_name = "authenc(hmac(sha384)," 2187 "ecb(cipher_null))", 2188 .cra_driver_name = "authenc-hmac-sha384-" 2189 "ecb-cipher_null-caam", 2190 .cra_blocksize = NULL_BLOCK_SIZE, 2191 }, 2192 .setkey = aead_setkey, 2193 .setauthsize = aead_setauthsize, 2194 .encrypt = aead_encrypt, 2195 .decrypt = aead_decrypt, 2196 .ivsize = NULL_IV_SIZE, 2197 .maxauthsize = SHA384_DIGEST_SIZE, 2198 }, 2199 .caam = { 2200 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2201 OP_ALG_AAI_HMAC_PRECOMP, 2202 }, 2203 }, 2204 { 2205 .aead = { 2206 .base = { 2207 .cra_name = "authenc(hmac(sha512)," 2208 "ecb(cipher_null))", 2209 .cra_driver_name = "authenc-hmac-sha512-" 2210 "ecb-cipher_null-caam", 2211 .cra_blocksize = NULL_BLOCK_SIZE, 2212 }, 2213 .setkey = aead_setkey, 2214 .setauthsize = aead_setauthsize, 2215 .encrypt = aead_encrypt, 2216 .decrypt = aead_decrypt, 2217 .ivsize = NULL_IV_SIZE, 2218 .maxauthsize = SHA512_DIGEST_SIZE, 2219 }, 2220 .caam = { 2221 .class2_alg_type = 
OP_ALG_ALGSEL_SHA512 | 2222 OP_ALG_AAI_HMAC_PRECOMP, 2223 }, 2224 }, 2225 { 2226 .aead = { 2227 .base = { 2228 .cra_name = "authenc(hmac(md5),cbc(aes))", 2229 .cra_driver_name = "authenc-hmac-md5-" 2230 "cbc-aes-caam", 2231 .cra_blocksize = AES_BLOCK_SIZE, 2232 }, 2233 .setkey = aead_setkey, 2234 .setauthsize = aead_setauthsize, 2235 .encrypt = aead_encrypt, 2236 .decrypt = aead_decrypt, 2237 .ivsize = AES_BLOCK_SIZE, 2238 .maxauthsize = MD5_DIGEST_SIZE, 2239 }, 2240 .caam = { 2241 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2242 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2243 OP_ALG_AAI_HMAC_PRECOMP, 2244 }, 2245 }, 2246 { 2247 .aead = { 2248 .base = { 2249 .cra_name = "echainiv(authenc(hmac(md5)," 2250 "cbc(aes)))", 2251 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2252 "cbc-aes-caam", 2253 .cra_blocksize = AES_BLOCK_SIZE, 2254 }, 2255 .setkey = aead_setkey, 2256 .setauthsize = aead_setauthsize, 2257 .encrypt = aead_encrypt, 2258 .decrypt = aead_decrypt, 2259 .ivsize = AES_BLOCK_SIZE, 2260 .maxauthsize = MD5_DIGEST_SIZE, 2261 }, 2262 .caam = { 2263 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2264 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2265 OP_ALG_AAI_HMAC_PRECOMP, 2266 .geniv = true, 2267 }, 2268 }, 2269 { 2270 .aead = { 2271 .base = { 2272 .cra_name = "authenc(hmac(sha1),cbc(aes))", 2273 .cra_driver_name = "authenc-hmac-sha1-" 2274 "cbc-aes-caam", 2275 .cra_blocksize = AES_BLOCK_SIZE, 2276 }, 2277 .setkey = aead_setkey, 2278 .setauthsize = aead_setauthsize, 2279 .encrypt = aead_encrypt, 2280 .decrypt = aead_decrypt, 2281 .ivsize = AES_BLOCK_SIZE, 2282 .maxauthsize = SHA1_DIGEST_SIZE, 2283 }, 2284 .caam = { 2285 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2286 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2287 OP_ALG_AAI_HMAC_PRECOMP, 2288 }, 2289 }, 2290 { 2291 .aead = { 2292 .base = { 2293 .cra_name = "echainiv(authenc(hmac(sha1)," 2294 "cbc(aes)))", 2295 .cra_driver_name = "echainiv-authenc-" 2296 "hmac-sha1-cbc-aes-caam", 2297 
.cra_blocksize = AES_BLOCK_SIZE, 2298 }, 2299 .setkey = aead_setkey, 2300 .setauthsize = aead_setauthsize, 2301 .encrypt = aead_encrypt, 2302 .decrypt = aead_decrypt, 2303 .ivsize = AES_BLOCK_SIZE, 2304 .maxauthsize = SHA1_DIGEST_SIZE, 2305 }, 2306 .caam = { 2307 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2308 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2309 OP_ALG_AAI_HMAC_PRECOMP, 2310 .geniv = true, 2311 }, 2312 }, 2313 { 2314 .aead = { 2315 .base = { 2316 .cra_name = "authenc(hmac(sha224),cbc(aes))", 2317 .cra_driver_name = "authenc-hmac-sha224-" 2318 "cbc-aes-caam", 2319 .cra_blocksize = AES_BLOCK_SIZE, 2320 }, 2321 .setkey = aead_setkey, 2322 .setauthsize = aead_setauthsize, 2323 .encrypt = aead_encrypt, 2324 .decrypt = aead_decrypt, 2325 .ivsize = AES_BLOCK_SIZE, 2326 .maxauthsize = SHA224_DIGEST_SIZE, 2327 }, 2328 .caam = { 2329 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2330 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2331 OP_ALG_AAI_HMAC_PRECOMP, 2332 }, 2333 }, 2334 { 2335 .aead = { 2336 .base = { 2337 .cra_name = "echainiv(authenc(hmac(sha224)," 2338 "cbc(aes)))", 2339 .cra_driver_name = "echainiv-authenc-" 2340 "hmac-sha224-cbc-aes-caam", 2341 .cra_blocksize = AES_BLOCK_SIZE, 2342 }, 2343 .setkey = aead_setkey, 2344 .setauthsize = aead_setauthsize, 2345 .encrypt = aead_encrypt, 2346 .decrypt = aead_decrypt, 2347 .ivsize = AES_BLOCK_SIZE, 2348 .maxauthsize = SHA224_DIGEST_SIZE, 2349 }, 2350 .caam = { 2351 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2352 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2353 OP_ALG_AAI_HMAC_PRECOMP, 2354 .geniv = true, 2355 }, 2356 }, 2357 { 2358 .aead = { 2359 .base = { 2360 .cra_name = "authenc(hmac(sha256),cbc(aes))", 2361 .cra_driver_name = "authenc-hmac-sha256-" 2362 "cbc-aes-caam", 2363 .cra_blocksize = AES_BLOCK_SIZE, 2364 }, 2365 .setkey = aead_setkey, 2366 .setauthsize = aead_setauthsize, 2367 .encrypt = aead_encrypt, 2368 .decrypt = aead_decrypt, 2369 .ivsize = AES_BLOCK_SIZE, 2370 
.maxauthsize = SHA256_DIGEST_SIZE, 2371 }, 2372 .caam = { 2373 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2374 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2375 OP_ALG_AAI_HMAC_PRECOMP, 2376 }, 2377 }, 2378 { 2379 .aead = { 2380 .base = { 2381 .cra_name = "echainiv(authenc(hmac(sha256)," 2382 "cbc(aes)))", 2383 .cra_driver_name = "echainiv-authenc-" 2384 "hmac-sha256-cbc-aes-caam", 2385 .cra_blocksize = AES_BLOCK_SIZE, 2386 }, 2387 .setkey = aead_setkey, 2388 .setauthsize = aead_setauthsize, 2389 .encrypt = aead_encrypt, 2390 .decrypt = aead_decrypt, 2391 .ivsize = AES_BLOCK_SIZE, 2392 .maxauthsize = SHA256_DIGEST_SIZE, 2393 }, 2394 .caam = { 2395 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2396 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2397 OP_ALG_AAI_HMAC_PRECOMP, 2398 .geniv = true, 2399 }, 2400 }, 2401 { 2402 .aead = { 2403 .base = { 2404 .cra_name = "authenc(hmac(sha384),cbc(aes))", 2405 .cra_driver_name = "authenc-hmac-sha384-" 2406 "cbc-aes-caam", 2407 .cra_blocksize = AES_BLOCK_SIZE, 2408 }, 2409 .setkey = aead_setkey, 2410 .setauthsize = aead_setauthsize, 2411 .encrypt = aead_encrypt, 2412 .decrypt = aead_decrypt, 2413 .ivsize = AES_BLOCK_SIZE, 2414 .maxauthsize = SHA384_DIGEST_SIZE, 2415 }, 2416 .caam = { 2417 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2418 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2419 OP_ALG_AAI_HMAC_PRECOMP, 2420 }, 2421 }, 2422 { 2423 .aead = { 2424 .base = { 2425 .cra_name = "echainiv(authenc(hmac(sha384)," 2426 "cbc(aes)))", 2427 .cra_driver_name = "echainiv-authenc-" 2428 "hmac-sha384-cbc-aes-caam", 2429 .cra_blocksize = AES_BLOCK_SIZE, 2430 }, 2431 .setkey = aead_setkey, 2432 .setauthsize = aead_setauthsize, 2433 .encrypt = aead_encrypt, 2434 .decrypt = aead_decrypt, 2435 .ivsize = AES_BLOCK_SIZE, 2436 .maxauthsize = SHA384_DIGEST_SIZE, 2437 }, 2438 .caam = { 2439 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2440 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2441 OP_ALG_AAI_HMAC_PRECOMP, 2442 
.geniv = true, 2443 }, 2444 }, 2445 { 2446 .aead = { 2447 .base = { 2448 .cra_name = "authenc(hmac(sha512),cbc(aes))", 2449 .cra_driver_name = "authenc-hmac-sha512-" 2450 "cbc-aes-caam", 2451 .cra_blocksize = AES_BLOCK_SIZE, 2452 }, 2453 .setkey = aead_setkey, 2454 .setauthsize = aead_setauthsize, 2455 .encrypt = aead_encrypt, 2456 .decrypt = aead_decrypt, 2457 .ivsize = AES_BLOCK_SIZE, 2458 .maxauthsize = SHA512_DIGEST_SIZE, 2459 }, 2460 .caam = { 2461 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2462 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2463 OP_ALG_AAI_HMAC_PRECOMP, 2464 }, 2465 }, 2466 { 2467 .aead = { 2468 .base = { 2469 .cra_name = "echainiv(authenc(hmac(sha512)," 2470 "cbc(aes)))", 2471 .cra_driver_name = "echainiv-authenc-" 2472 "hmac-sha512-cbc-aes-caam", 2473 .cra_blocksize = AES_BLOCK_SIZE, 2474 }, 2475 .setkey = aead_setkey, 2476 .setauthsize = aead_setauthsize, 2477 .encrypt = aead_encrypt, 2478 .decrypt = aead_decrypt, 2479 .ivsize = AES_BLOCK_SIZE, 2480 .maxauthsize = SHA512_DIGEST_SIZE, 2481 }, 2482 .caam = { 2483 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2484 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2485 OP_ALG_AAI_HMAC_PRECOMP, 2486 .geniv = true, 2487 }, 2488 }, 2489 { 2490 .aead = { 2491 .base = { 2492 .cra_name = "authenc(hmac(md5),cbc(des3_ede))", 2493 .cra_driver_name = "authenc-hmac-md5-" 2494 "cbc-des3_ede-caam", 2495 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2496 }, 2497 .setkey = des3_aead_setkey, 2498 .setauthsize = aead_setauthsize, 2499 .encrypt = aead_encrypt, 2500 .decrypt = aead_decrypt, 2501 .ivsize = DES3_EDE_BLOCK_SIZE, 2502 .maxauthsize = MD5_DIGEST_SIZE, 2503 }, 2504 .caam = { 2505 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2506 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2507 OP_ALG_AAI_HMAC_PRECOMP, 2508 } 2509 }, 2510 { 2511 .aead = { 2512 .base = { 2513 .cra_name = "echainiv(authenc(hmac(md5)," 2514 "cbc(des3_ede)))", 2515 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2516 
"cbc-des3_ede-caam", 2517 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2518 }, 2519 .setkey = des3_aead_setkey, 2520 .setauthsize = aead_setauthsize, 2521 .encrypt = aead_encrypt, 2522 .decrypt = aead_decrypt, 2523 .ivsize = DES3_EDE_BLOCK_SIZE, 2524 .maxauthsize = MD5_DIGEST_SIZE, 2525 }, 2526 .caam = { 2527 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2528 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2529 OP_ALG_AAI_HMAC_PRECOMP, 2530 .geniv = true, 2531 } 2532 }, 2533 { 2534 .aead = { 2535 .base = { 2536 .cra_name = "authenc(hmac(sha1)," 2537 "cbc(des3_ede))", 2538 .cra_driver_name = "authenc-hmac-sha1-" 2539 "cbc-des3_ede-caam", 2540 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2541 }, 2542 .setkey = des3_aead_setkey, 2543 .setauthsize = aead_setauthsize, 2544 .encrypt = aead_encrypt, 2545 .decrypt = aead_decrypt, 2546 .ivsize = DES3_EDE_BLOCK_SIZE, 2547 .maxauthsize = SHA1_DIGEST_SIZE, 2548 }, 2549 .caam = { 2550 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2551 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2552 OP_ALG_AAI_HMAC_PRECOMP, 2553 }, 2554 }, 2555 { 2556 .aead = { 2557 .base = { 2558 .cra_name = "echainiv(authenc(hmac(sha1)," 2559 "cbc(des3_ede)))", 2560 .cra_driver_name = "echainiv-authenc-" 2561 "hmac-sha1-" 2562 "cbc-des3_ede-caam", 2563 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2564 }, 2565 .setkey = des3_aead_setkey, 2566 .setauthsize = aead_setauthsize, 2567 .encrypt = aead_encrypt, 2568 .decrypt = aead_decrypt, 2569 .ivsize = DES3_EDE_BLOCK_SIZE, 2570 .maxauthsize = SHA1_DIGEST_SIZE, 2571 }, 2572 .caam = { 2573 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2574 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2575 OP_ALG_AAI_HMAC_PRECOMP, 2576 .geniv = true, 2577 }, 2578 }, 2579 { 2580 .aead = { 2581 .base = { 2582 .cra_name = "authenc(hmac(sha224)," 2583 "cbc(des3_ede))", 2584 .cra_driver_name = "authenc-hmac-sha224-" 2585 "cbc-des3_ede-caam", 2586 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2587 }, 2588 .setkey = des3_aead_setkey, 2589 .setauthsize = 
aead_setauthsize, 2590 .encrypt = aead_encrypt, 2591 .decrypt = aead_decrypt, 2592 .ivsize = DES3_EDE_BLOCK_SIZE, 2593 .maxauthsize = SHA224_DIGEST_SIZE, 2594 }, 2595 .caam = { 2596 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2597 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2598 OP_ALG_AAI_HMAC_PRECOMP, 2599 }, 2600 }, 2601 { 2602 .aead = { 2603 .base = { 2604 .cra_name = "echainiv(authenc(hmac(sha224)," 2605 "cbc(des3_ede)))", 2606 .cra_driver_name = "echainiv-authenc-" 2607 "hmac-sha224-" 2608 "cbc-des3_ede-caam", 2609 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2610 }, 2611 .setkey = des3_aead_setkey, 2612 .setauthsize = aead_setauthsize, 2613 .encrypt = aead_encrypt, 2614 .decrypt = aead_decrypt, 2615 .ivsize = DES3_EDE_BLOCK_SIZE, 2616 .maxauthsize = SHA224_DIGEST_SIZE, 2617 }, 2618 .caam = { 2619 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2620 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2621 OP_ALG_AAI_HMAC_PRECOMP, 2622 .geniv = true, 2623 }, 2624 }, 2625 { 2626 .aead = { 2627 .base = { 2628 .cra_name = "authenc(hmac(sha256)," 2629 "cbc(des3_ede))", 2630 .cra_driver_name = "authenc-hmac-sha256-" 2631 "cbc-des3_ede-caam", 2632 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2633 }, 2634 .setkey = des3_aead_setkey, 2635 .setauthsize = aead_setauthsize, 2636 .encrypt = aead_encrypt, 2637 .decrypt = aead_decrypt, 2638 .ivsize = DES3_EDE_BLOCK_SIZE, 2639 .maxauthsize = SHA256_DIGEST_SIZE, 2640 }, 2641 .caam = { 2642 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2643 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2644 OP_ALG_AAI_HMAC_PRECOMP, 2645 }, 2646 }, 2647 { 2648 .aead = { 2649 .base = { 2650 .cra_name = "echainiv(authenc(hmac(sha256)," 2651 "cbc(des3_ede)))", 2652 .cra_driver_name = "echainiv-authenc-" 2653 "hmac-sha256-" 2654 "cbc-des3_ede-caam", 2655 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2656 }, 2657 .setkey = des3_aead_setkey, 2658 .setauthsize = aead_setauthsize, 2659 .encrypt = aead_encrypt, 2660 .decrypt = aead_decrypt, 2661 .ivsize = 
DES3_EDE_BLOCK_SIZE, 2662 .maxauthsize = SHA256_DIGEST_SIZE, 2663 }, 2664 .caam = { 2665 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2666 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2667 OP_ALG_AAI_HMAC_PRECOMP, 2668 .geniv = true, 2669 }, 2670 }, 2671 { 2672 .aead = { 2673 .base = { 2674 .cra_name = "authenc(hmac(sha384)," 2675 "cbc(des3_ede))", 2676 .cra_driver_name = "authenc-hmac-sha384-" 2677 "cbc-des3_ede-caam", 2678 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2679 }, 2680 .setkey = des3_aead_setkey, 2681 .setauthsize = aead_setauthsize, 2682 .encrypt = aead_encrypt, 2683 .decrypt = aead_decrypt, 2684 .ivsize = DES3_EDE_BLOCK_SIZE, 2685 .maxauthsize = SHA384_DIGEST_SIZE, 2686 }, 2687 .caam = { 2688 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2689 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2690 OP_ALG_AAI_HMAC_PRECOMP, 2691 }, 2692 }, 2693 { 2694 .aead = { 2695 .base = { 2696 .cra_name = "echainiv(authenc(hmac(sha384)," 2697 "cbc(des3_ede)))", 2698 .cra_driver_name = "echainiv-authenc-" 2699 "hmac-sha384-" 2700 "cbc-des3_ede-caam", 2701 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2702 }, 2703 .setkey = des3_aead_setkey, 2704 .setauthsize = aead_setauthsize, 2705 .encrypt = aead_encrypt, 2706 .decrypt = aead_decrypt, 2707 .ivsize = DES3_EDE_BLOCK_SIZE, 2708 .maxauthsize = SHA384_DIGEST_SIZE, 2709 }, 2710 .caam = { 2711 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2712 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2713 OP_ALG_AAI_HMAC_PRECOMP, 2714 .geniv = true, 2715 }, 2716 }, 2717 { 2718 .aead = { 2719 .base = { 2720 .cra_name = "authenc(hmac(sha512)," 2721 "cbc(des3_ede))", 2722 .cra_driver_name = "authenc-hmac-sha512-" 2723 "cbc-des3_ede-caam", 2724 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2725 }, 2726 .setkey = des3_aead_setkey, 2727 .setauthsize = aead_setauthsize, 2728 .encrypt = aead_encrypt, 2729 .decrypt = aead_decrypt, 2730 .ivsize = DES3_EDE_BLOCK_SIZE, 2731 .maxauthsize = SHA512_DIGEST_SIZE, 2732 }, 2733 .caam = { 2734 .class1_alg_type 
= OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2735 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2736 OP_ALG_AAI_HMAC_PRECOMP, 2737 }, 2738 }, 2739 { 2740 .aead = { 2741 .base = { 2742 .cra_name = "echainiv(authenc(hmac(sha512)," 2743 "cbc(des3_ede)))", 2744 .cra_driver_name = "echainiv-authenc-" 2745 "hmac-sha512-" 2746 "cbc-des3_ede-caam", 2747 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2748 }, 2749 .setkey = des3_aead_setkey, 2750 .setauthsize = aead_setauthsize, 2751 .encrypt = aead_encrypt, 2752 .decrypt = aead_decrypt, 2753 .ivsize = DES3_EDE_BLOCK_SIZE, 2754 .maxauthsize = SHA512_DIGEST_SIZE, 2755 }, 2756 .caam = { 2757 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2758 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2759 OP_ALG_AAI_HMAC_PRECOMP, 2760 .geniv = true, 2761 }, 2762 }, 2763 { 2764 .aead = { 2765 .base = { 2766 .cra_name = "authenc(hmac(md5),cbc(des))", 2767 .cra_driver_name = "authenc-hmac-md5-" 2768 "cbc-des-caam", 2769 .cra_blocksize = DES_BLOCK_SIZE, 2770 }, 2771 .setkey = aead_setkey, 2772 .setauthsize = aead_setauthsize, 2773 .encrypt = aead_encrypt, 2774 .decrypt = aead_decrypt, 2775 .ivsize = DES_BLOCK_SIZE, 2776 .maxauthsize = MD5_DIGEST_SIZE, 2777 }, 2778 .caam = { 2779 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2780 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2781 OP_ALG_AAI_HMAC_PRECOMP, 2782 }, 2783 }, 2784 { 2785 .aead = { 2786 .base = { 2787 .cra_name = "echainiv(authenc(hmac(md5)," 2788 "cbc(des)))", 2789 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2790 "cbc-des-caam", 2791 .cra_blocksize = DES_BLOCK_SIZE, 2792 }, 2793 .setkey = aead_setkey, 2794 .setauthsize = aead_setauthsize, 2795 .encrypt = aead_encrypt, 2796 .decrypt = aead_decrypt, 2797 .ivsize = DES_BLOCK_SIZE, 2798 .maxauthsize = MD5_DIGEST_SIZE, 2799 }, 2800 .caam = { 2801 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2802 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2803 OP_ALG_AAI_HMAC_PRECOMP, 2804 .geniv = true, 2805 }, 2806 }, 2807 { 2808 .aead = { 2809 .base = { 
2810 .cra_name = "authenc(hmac(sha1),cbc(des))", 2811 .cra_driver_name = "authenc-hmac-sha1-" 2812 "cbc-des-caam", 2813 .cra_blocksize = DES_BLOCK_SIZE, 2814 }, 2815 .setkey = aead_setkey, 2816 .setauthsize = aead_setauthsize, 2817 .encrypt = aead_encrypt, 2818 .decrypt = aead_decrypt, 2819 .ivsize = DES_BLOCK_SIZE, 2820 .maxauthsize = SHA1_DIGEST_SIZE, 2821 }, 2822 .caam = { 2823 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2824 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2825 OP_ALG_AAI_HMAC_PRECOMP, 2826 }, 2827 }, 2828 { 2829 .aead = { 2830 .base = { 2831 .cra_name = "echainiv(authenc(hmac(sha1)," 2832 "cbc(des)))", 2833 .cra_driver_name = "echainiv-authenc-" 2834 "hmac-sha1-cbc-des-caam", 2835 .cra_blocksize = DES_BLOCK_SIZE, 2836 }, 2837 .setkey = aead_setkey, 2838 .setauthsize = aead_setauthsize, 2839 .encrypt = aead_encrypt, 2840 .decrypt = aead_decrypt, 2841 .ivsize = DES_BLOCK_SIZE, 2842 .maxauthsize = SHA1_DIGEST_SIZE, 2843 }, 2844 .caam = { 2845 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2846 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2847 OP_ALG_AAI_HMAC_PRECOMP, 2848 .geniv = true, 2849 }, 2850 }, 2851 { 2852 .aead = { 2853 .base = { 2854 .cra_name = "authenc(hmac(sha224),cbc(des))", 2855 .cra_driver_name = "authenc-hmac-sha224-" 2856 "cbc-des-caam", 2857 .cra_blocksize = DES_BLOCK_SIZE, 2858 }, 2859 .setkey = aead_setkey, 2860 .setauthsize = aead_setauthsize, 2861 .encrypt = aead_encrypt, 2862 .decrypt = aead_decrypt, 2863 .ivsize = DES_BLOCK_SIZE, 2864 .maxauthsize = SHA224_DIGEST_SIZE, 2865 }, 2866 .caam = { 2867 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2868 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2869 OP_ALG_AAI_HMAC_PRECOMP, 2870 }, 2871 }, 2872 { 2873 .aead = { 2874 .base = { 2875 .cra_name = "echainiv(authenc(hmac(sha224)," 2876 "cbc(des)))", 2877 .cra_driver_name = "echainiv-authenc-" 2878 "hmac-sha224-cbc-des-caam", 2879 .cra_blocksize = DES_BLOCK_SIZE, 2880 }, 2881 .setkey = aead_setkey, 2882 .setauthsize = 
aead_setauthsize, 2883 .encrypt = aead_encrypt, 2884 .decrypt = aead_decrypt, 2885 .ivsize = DES_BLOCK_SIZE, 2886 .maxauthsize = SHA224_DIGEST_SIZE, 2887 }, 2888 .caam = { 2889 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2890 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2891 OP_ALG_AAI_HMAC_PRECOMP, 2892 .geniv = true, 2893 }, 2894 }, 2895 { 2896 .aead = { 2897 .base = { 2898 .cra_name = "authenc(hmac(sha256),cbc(des))", 2899 .cra_driver_name = "authenc-hmac-sha256-" 2900 "cbc-des-caam", 2901 .cra_blocksize = DES_BLOCK_SIZE, 2902 }, 2903 .setkey = aead_setkey, 2904 .setauthsize = aead_setauthsize, 2905 .encrypt = aead_encrypt, 2906 .decrypt = aead_decrypt, 2907 .ivsize = DES_BLOCK_SIZE, 2908 .maxauthsize = SHA256_DIGEST_SIZE, 2909 }, 2910 .caam = { 2911 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2912 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2913 OP_ALG_AAI_HMAC_PRECOMP, 2914 }, 2915 }, 2916 { 2917 .aead = { 2918 .base = { 2919 .cra_name = "echainiv(authenc(hmac(sha256)," 2920 "cbc(des)))", 2921 .cra_driver_name = "echainiv-authenc-" 2922 "hmac-sha256-cbc-des-caam", 2923 .cra_blocksize = DES_BLOCK_SIZE, 2924 }, 2925 .setkey = aead_setkey, 2926 .setauthsize = aead_setauthsize, 2927 .encrypt = aead_encrypt, 2928 .decrypt = aead_decrypt, 2929 .ivsize = DES_BLOCK_SIZE, 2930 .maxauthsize = SHA256_DIGEST_SIZE, 2931 }, 2932 .caam = { 2933 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2934 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2935 OP_ALG_AAI_HMAC_PRECOMP, 2936 .geniv = true, 2937 }, 2938 }, 2939 { 2940 .aead = { 2941 .base = { 2942 .cra_name = "authenc(hmac(sha384),cbc(des))", 2943 .cra_driver_name = "authenc-hmac-sha384-" 2944 "cbc-des-caam", 2945 .cra_blocksize = DES_BLOCK_SIZE, 2946 }, 2947 .setkey = aead_setkey, 2948 .setauthsize = aead_setauthsize, 2949 .encrypt = aead_encrypt, 2950 .decrypt = aead_decrypt, 2951 .ivsize = DES_BLOCK_SIZE, 2952 .maxauthsize = SHA384_DIGEST_SIZE, 2953 }, 2954 .caam = { 2955 .class1_alg_type = 
OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2956 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2957 OP_ALG_AAI_HMAC_PRECOMP, 2958 }, 2959 }, 2960 { 2961 .aead = { 2962 .base = { 2963 .cra_name = "echainiv(authenc(hmac(sha384)," 2964 "cbc(des)))", 2965 .cra_driver_name = "echainiv-authenc-" 2966 "hmac-sha384-cbc-des-caam", 2967 .cra_blocksize = DES_BLOCK_SIZE, 2968 }, 2969 .setkey = aead_setkey, 2970 .setauthsize = aead_setauthsize, 2971 .encrypt = aead_encrypt, 2972 .decrypt = aead_decrypt, 2973 .ivsize = DES_BLOCK_SIZE, 2974 .maxauthsize = SHA384_DIGEST_SIZE, 2975 }, 2976 .caam = { 2977 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2978 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2979 OP_ALG_AAI_HMAC_PRECOMP, 2980 .geniv = true, 2981 }, 2982 }, 2983 { 2984 .aead = { 2985 .base = { 2986 .cra_name = "authenc(hmac(sha512),cbc(des))", 2987 .cra_driver_name = "authenc-hmac-sha512-" 2988 "cbc-des-caam", 2989 .cra_blocksize = DES_BLOCK_SIZE, 2990 }, 2991 .setkey = aead_setkey, 2992 .setauthsize = aead_setauthsize, 2993 .encrypt = aead_encrypt, 2994 .decrypt = aead_decrypt, 2995 .ivsize = DES_BLOCK_SIZE, 2996 .maxauthsize = SHA512_DIGEST_SIZE, 2997 }, 2998 .caam = { 2999 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 3000 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3001 OP_ALG_AAI_HMAC_PRECOMP, 3002 }, 3003 }, 3004 { 3005 .aead = { 3006 .base = { 3007 .cra_name = "echainiv(authenc(hmac(sha512)," 3008 "cbc(des)))", 3009 .cra_driver_name = "echainiv-authenc-" 3010 "hmac-sha512-cbc-des-caam", 3011 .cra_blocksize = DES_BLOCK_SIZE, 3012 }, 3013 .setkey = aead_setkey, 3014 .setauthsize = aead_setauthsize, 3015 .encrypt = aead_encrypt, 3016 .decrypt = aead_decrypt, 3017 .ivsize = DES_BLOCK_SIZE, 3018 .maxauthsize = SHA512_DIGEST_SIZE, 3019 }, 3020 .caam = { 3021 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 3022 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3023 OP_ALG_AAI_HMAC_PRECOMP, 3024 .geniv = true, 3025 }, 3026 }, 3027 { 3028 .aead = { 3029 .base = { 3030 .cra_name 
= "authenc(hmac(md5)," 3031 "rfc3686(ctr(aes)))", 3032 .cra_driver_name = "authenc-hmac-md5-" 3033 "rfc3686-ctr-aes-caam", 3034 .cra_blocksize = 1, 3035 }, 3036 .setkey = aead_setkey, 3037 .setauthsize = aead_setauthsize, 3038 .encrypt = aead_encrypt, 3039 .decrypt = aead_decrypt, 3040 .ivsize = CTR_RFC3686_IV_SIZE, 3041 .maxauthsize = MD5_DIGEST_SIZE, 3042 }, 3043 .caam = { 3044 .class1_alg_type = OP_ALG_ALGSEL_AES | 3045 OP_ALG_AAI_CTR_MOD128, 3046 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3047 OP_ALG_AAI_HMAC_PRECOMP, 3048 .rfc3686 = true, 3049 }, 3050 }, 3051 { 3052 .aead = { 3053 .base = { 3054 .cra_name = "seqiv(authenc(" 3055 "hmac(md5),rfc3686(ctr(aes))))", 3056 .cra_driver_name = "seqiv-authenc-hmac-md5-" 3057 "rfc3686-ctr-aes-caam", 3058 .cra_blocksize = 1, 3059 }, 3060 .setkey = aead_setkey, 3061 .setauthsize = aead_setauthsize, 3062 .encrypt = aead_encrypt, 3063 .decrypt = aead_decrypt, 3064 .ivsize = CTR_RFC3686_IV_SIZE, 3065 .maxauthsize = MD5_DIGEST_SIZE, 3066 }, 3067 .caam = { 3068 .class1_alg_type = OP_ALG_ALGSEL_AES | 3069 OP_ALG_AAI_CTR_MOD128, 3070 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3071 OP_ALG_AAI_HMAC_PRECOMP, 3072 .rfc3686 = true, 3073 .geniv = true, 3074 }, 3075 }, 3076 { 3077 .aead = { 3078 .base = { 3079 .cra_name = "authenc(hmac(sha1)," 3080 "rfc3686(ctr(aes)))", 3081 .cra_driver_name = "authenc-hmac-sha1-" 3082 "rfc3686-ctr-aes-caam", 3083 .cra_blocksize = 1, 3084 }, 3085 .setkey = aead_setkey, 3086 .setauthsize = aead_setauthsize, 3087 .encrypt = aead_encrypt, 3088 .decrypt = aead_decrypt, 3089 .ivsize = CTR_RFC3686_IV_SIZE, 3090 .maxauthsize = SHA1_DIGEST_SIZE, 3091 }, 3092 .caam = { 3093 .class1_alg_type = OP_ALG_ALGSEL_AES | 3094 OP_ALG_AAI_CTR_MOD128, 3095 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3096 OP_ALG_AAI_HMAC_PRECOMP, 3097 .rfc3686 = true, 3098 }, 3099 }, 3100 { 3101 .aead = { 3102 .base = { 3103 .cra_name = "seqiv(authenc(" 3104 "hmac(sha1),rfc3686(ctr(aes))))", 3105 .cra_driver_name = "seqiv-authenc-hmac-sha1-" 3106 
"rfc3686-ctr-aes-caam", 3107 .cra_blocksize = 1, 3108 }, 3109 .setkey = aead_setkey, 3110 .setauthsize = aead_setauthsize, 3111 .encrypt = aead_encrypt, 3112 .decrypt = aead_decrypt, 3113 .ivsize = CTR_RFC3686_IV_SIZE, 3114 .maxauthsize = SHA1_DIGEST_SIZE, 3115 }, 3116 .caam = { 3117 .class1_alg_type = OP_ALG_ALGSEL_AES | 3118 OP_ALG_AAI_CTR_MOD128, 3119 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3120 OP_ALG_AAI_HMAC_PRECOMP, 3121 .rfc3686 = true, 3122 .geniv = true, 3123 }, 3124 }, 3125 { 3126 .aead = { 3127 .base = { 3128 .cra_name = "authenc(hmac(sha224)," 3129 "rfc3686(ctr(aes)))", 3130 .cra_driver_name = "authenc-hmac-sha224-" 3131 "rfc3686-ctr-aes-caam", 3132 .cra_blocksize = 1, 3133 }, 3134 .setkey = aead_setkey, 3135 .setauthsize = aead_setauthsize, 3136 .encrypt = aead_encrypt, 3137 .decrypt = aead_decrypt, 3138 .ivsize = CTR_RFC3686_IV_SIZE, 3139 .maxauthsize = SHA224_DIGEST_SIZE, 3140 }, 3141 .caam = { 3142 .class1_alg_type = OP_ALG_ALGSEL_AES | 3143 OP_ALG_AAI_CTR_MOD128, 3144 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3145 OP_ALG_AAI_HMAC_PRECOMP, 3146 .rfc3686 = true, 3147 }, 3148 }, 3149 { 3150 .aead = { 3151 .base = { 3152 .cra_name = "seqiv(authenc(" 3153 "hmac(sha224),rfc3686(ctr(aes))))", 3154 .cra_driver_name = "seqiv-authenc-hmac-sha224-" 3155 "rfc3686-ctr-aes-caam", 3156 .cra_blocksize = 1, 3157 }, 3158 .setkey = aead_setkey, 3159 .setauthsize = aead_setauthsize, 3160 .encrypt = aead_encrypt, 3161 .decrypt = aead_decrypt, 3162 .ivsize = CTR_RFC3686_IV_SIZE, 3163 .maxauthsize = SHA224_DIGEST_SIZE, 3164 }, 3165 .caam = { 3166 .class1_alg_type = OP_ALG_ALGSEL_AES | 3167 OP_ALG_AAI_CTR_MOD128, 3168 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3169 OP_ALG_AAI_HMAC_PRECOMP, 3170 .rfc3686 = true, 3171 .geniv = true, 3172 }, 3173 }, 3174 { 3175 .aead = { 3176 .base = { 3177 .cra_name = "authenc(hmac(sha256)," 3178 "rfc3686(ctr(aes)))", 3179 .cra_driver_name = "authenc-hmac-sha256-" 3180 "rfc3686-ctr-aes-caam", 3181 .cra_blocksize = 1, 3182 }, 3183 .setkey = 
aead_setkey, 3184 .setauthsize = aead_setauthsize, 3185 .encrypt = aead_encrypt, 3186 .decrypt = aead_decrypt, 3187 .ivsize = CTR_RFC3686_IV_SIZE, 3188 .maxauthsize = SHA256_DIGEST_SIZE, 3189 }, 3190 .caam = { 3191 .class1_alg_type = OP_ALG_ALGSEL_AES | 3192 OP_ALG_AAI_CTR_MOD128, 3193 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3194 OP_ALG_AAI_HMAC_PRECOMP, 3195 .rfc3686 = true, 3196 }, 3197 }, 3198 { 3199 .aead = { 3200 .base = { 3201 .cra_name = "seqiv(authenc(hmac(sha256)," 3202 "rfc3686(ctr(aes))))", 3203 .cra_driver_name = "seqiv-authenc-hmac-sha256-" 3204 "rfc3686-ctr-aes-caam", 3205 .cra_blocksize = 1, 3206 }, 3207 .setkey = aead_setkey, 3208 .setauthsize = aead_setauthsize, 3209 .encrypt = aead_encrypt, 3210 .decrypt = aead_decrypt, 3211 .ivsize = CTR_RFC3686_IV_SIZE, 3212 .maxauthsize = SHA256_DIGEST_SIZE, 3213 }, 3214 .caam = { 3215 .class1_alg_type = OP_ALG_ALGSEL_AES | 3216 OP_ALG_AAI_CTR_MOD128, 3217 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3218 OP_ALG_AAI_HMAC_PRECOMP, 3219 .rfc3686 = true, 3220 .geniv = true, 3221 }, 3222 }, 3223 { 3224 .aead = { 3225 .base = { 3226 .cra_name = "authenc(hmac(sha384)," 3227 "rfc3686(ctr(aes)))", 3228 .cra_driver_name = "authenc-hmac-sha384-" 3229 "rfc3686-ctr-aes-caam", 3230 .cra_blocksize = 1, 3231 }, 3232 .setkey = aead_setkey, 3233 .setauthsize = aead_setauthsize, 3234 .encrypt = aead_encrypt, 3235 .decrypt = aead_decrypt, 3236 .ivsize = CTR_RFC3686_IV_SIZE, 3237 .maxauthsize = SHA384_DIGEST_SIZE, 3238 }, 3239 .caam = { 3240 .class1_alg_type = OP_ALG_ALGSEL_AES | 3241 OP_ALG_AAI_CTR_MOD128, 3242 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3243 OP_ALG_AAI_HMAC_PRECOMP, 3244 .rfc3686 = true, 3245 }, 3246 }, 3247 { 3248 .aead = { 3249 .base = { 3250 .cra_name = "seqiv(authenc(hmac(sha384)," 3251 "rfc3686(ctr(aes))))", 3252 .cra_driver_name = "seqiv-authenc-hmac-sha384-" 3253 "rfc3686-ctr-aes-caam", 3254 .cra_blocksize = 1, 3255 }, 3256 .setkey = aead_setkey, 3257 .setauthsize = aead_setauthsize, 3258 .encrypt = 
aead_encrypt, 3259 .decrypt = aead_decrypt, 3260 .ivsize = CTR_RFC3686_IV_SIZE, 3261 .maxauthsize = SHA384_DIGEST_SIZE, 3262 }, 3263 .caam = { 3264 .class1_alg_type = OP_ALG_ALGSEL_AES | 3265 OP_ALG_AAI_CTR_MOD128, 3266 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3267 OP_ALG_AAI_HMAC_PRECOMP, 3268 .rfc3686 = true, 3269 .geniv = true, 3270 }, 3271 }, 3272 { 3273 .aead = { 3274 .base = { 3275 .cra_name = "authenc(hmac(sha512)," 3276 "rfc3686(ctr(aes)))", 3277 .cra_driver_name = "authenc-hmac-sha512-" 3278 "rfc3686-ctr-aes-caam", 3279 .cra_blocksize = 1, 3280 }, 3281 .setkey = aead_setkey, 3282 .setauthsize = aead_setauthsize, 3283 .encrypt = aead_encrypt, 3284 .decrypt = aead_decrypt, 3285 .ivsize = CTR_RFC3686_IV_SIZE, 3286 .maxauthsize = SHA512_DIGEST_SIZE, 3287 }, 3288 .caam = { 3289 .class1_alg_type = OP_ALG_ALGSEL_AES | 3290 OP_ALG_AAI_CTR_MOD128, 3291 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3292 OP_ALG_AAI_HMAC_PRECOMP, 3293 .rfc3686 = true, 3294 }, 3295 }, 3296 { 3297 .aead = { 3298 .base = { 3299 .cra_name = "seqiv(authenc(hmac(sha512)," 3300 "rfc3686(ctr(aes))))", 3301 .cra_driver_name = "seqiv-authenc-hmac-sha512-" 3302 "rfc3686-ctr-aes-caam", 3303 .cra_blocksize = 1, 3304 }, 3305 .setkey = aead_setkey, 3306 .setauthsize = aead_setauthsize, 3307 .encrypt = aead_encrypt, 3308 .decrypt = aead_decrypt, 3309 .ivsize = CTR_RFC3686_IV_SIZE, 3310 .maxauthsize = SHA512_DIGEST_SIZE, 3311 }, 3312 .caam = { 3313 .class1_alg_type = OP_ALG_ALGSEL_AES | 3314 OP_ALG_AAI_CTR_MOD128, 3315 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3316 OP_ALG_AAI_HMAC_PRECOMP, 3317 .rfc3686 = true, 3318 .geniv = true, 3319 }, 3320 }, 3321 { 3322 .aead = { 3323 .base = { 3324 .cra_name = "rfc7539(chacha20,poly1305)", 3325 .cra_driver_name = "rfc7539-chacha20-poly1305-" 3326 "caam", 3327 .cra_blocksize = 1, 3328 }, 3329 .setkey = chachapoly_setkey, 3330 .setauthsize = chachapoly_setauthsize, 3331 .encrypt = chachapoly_encrypt, 3332 .decrypt = chachapoly_decrypt, 3333 .ivsize = 
						CHACHAPOLY_IV_SIZE,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			/* no split-key/DKP setkey for chacha20-poly1305 */
			.nodkp = true,
		},
	},
	{
		.aead = {
			.base = {
				.cra_name = "rfc7539esp(chacha20,poly1305)",
				.cra_driver_name = "rfc7539esp-chacha20-"
						   "poly1305-caam",
				.cra_blocksize = 1,
			},
			.setkey = chachapoly_setkey,
			.setauthsize = chachapoly_setauthsize,
			.encrypt = chachapoly_encrypt,
			.decrypt = chachapoly_decrypt,
			/* rfc7539esp: 8-byte explicit IV (RFC 7634 ESP nonce) */
			.ivsize = 8,
			.maxauthsize = POLY1305_DIGEST_SIZE,
		},
		.caam = {
			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
					   OP_ALG_AAI_AEAD,
			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
					   OP_ALG_AAI_AEAD,
			.nodkp = true,
		},
	},
};
/* end of driver_aeads[] template table */

/*
 * caam_init_common - one-time transform-context setup shared by the
 * skcipher and AEAD ->init hooks.
 * @ctx:      per-transform context to initialize
 * @caam:     algorithm template supplying the CLASS1/CLASS2 OPERATION values
 * @uses_dkp: true when the algorithm's setkey relies on DKP
 *	      (Derived Key Protocol); see the direction choice below
 *
 * Allocates a job ring for this transform, then DMA-maps the shared
 * descriptors and key storage in one contiguous region and derives the
 * individual DMA addresses with offsetof().
 *
 * Return: 0 on success, -ENOMEM on mapping failure, or the error from
 * caam_jr_alloc().
 */
static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
			    bool uses_dkp)
{
	dma_addr_t dma_addr;
	struct caam_drv_private *priv;

	ctx->jrdev = caam_jr_alloc();
	if (IS_ERR(ctx->jrdev)) {
		pr_err("Job Ring Device allocation for transform failed\n");
		return PTR_ERR(ctx->jrdev);
	}

	priv = dev_get_drvdata(ctx->jrdev->parent);
	/*
	 * NOTE(review): on era >= 6 with DKP the mapping is bidirectional —
	 * presumably because DKP makes the engine write the derived key back
	 * into the mapped region; confirm against the CAAM reference manual.
	 */
	if (priv->era >= 6 && uses_dkp)
		ctx->dir = DMA_BIDIRECTIONAL;
	else
		ctx->dir = DMA_TO_DEVICE;

	/*
	 * Map everything from sh_desc_enc up to (but not including)
	 * sh_desc_enc_dma as a single region; this assumes the shared
	 * descriptors and the key buffer are laid out contiguously at the
	 * start of struct caam_ctx — TODO confirm against the struct layout.
	 */
	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
					offsetof(struct caam_ctx,
						 sh_desc_enc_dma),
					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
		caam_jr_free(ctx->jrdev);
		return -ENOMEM;
	}

	/* per-object DMA addresses are offsets into the single mapping */
	ctx->sh_desc_enc_dma = dma_addr;
	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
						   sh_desc_dec);
	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);

	/* copy descriptor header template value */
	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;

	return 0;
}

/* skcipher ->init hook: plain ciphers never use DKP, hence uses_dkp=false */
static int caam_cra_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct caam_skcipher_alg *caam_alg =
		container_of(alg, typeof(*caam_alg), skcipher);

	return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam,
				false);
}

/* AEAD ->init hook: DKP is used unless the template opted out via nodkp */
static int caam_aead_init(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	struct caam_aead_alg *caam_alg =
		 container_of(alg, struct caam_aead_alg, aead);
	struct caam_ctx *ctx = crypto_aead_ctx(tfm);

	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
}

/*
 * Undo caam_init_common(): unmap the descriptor/key region (same size and
 * direction as the original mapping) and release the job ring.
 */
static void caam_exit_common(struct caam_ctx *ctx)
{
	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
			       offsetof(struct caam_ctx, sh_desc_enc_dma),
			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
	caam_jr_free(ctx->jrdev);
}

static void caam_cra_exit(struct crypto_skcipher *tfm)
{
	caam_exit_common(crypto_skcipher_ctx(tfm));
}

static void caam_aead_exit(struct crypto_aead *tfm)
{
	caam_exit_common(crypto_aead_ctx(tfm));
}

/*
 * Module exit: unregister only the algorithms whose registration succeeded
 * (tracked via the per-template ->registered flag set at init time).
 */
static void __exit caam_algapi_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;

		if (t_alg->registered)
			crypto_unregister_aead(&t_alg->aead);
	}

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_skcipher_alg *t_alg = driver_algs + i;

		if (t_alg->registered)
			crypto_unregister_skcipher(&t_alg->skcipher);
	}
}

/* Fill in the crypto_alg boilerplate common to every skcipher template */
static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
{
	struct skcipher_alg *alg = &t_alg->skcipher;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_cra_init;
	alg->exit = caam_cra_exit;
}

/* Fill in the crypto_alg boilerplate common to every AEAD template */
static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}

/*
 * Module init: locate the CAAM controller via the device tree, read the
 * hardware capability registers, and register every template the device
 * can actually support.  (Continues past this view.)
 */
static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
	u32 arc4_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false, gcm_support;

	/* older DTs use "fsl,sec4.0"; try the canonical name first */
	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	priv = dev_get_drvdata(&pdev->dev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv) {
		err = -ENODEV;
		goto out_put_dev;
	}


	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
3532 */ 3533 if (priv->era < 10) { 3534 u32 cha_vid, cha_inst, aes_rn; 3535 3536 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls); 3537 aes_vid = cha_vid & CHA_ID_LS_AES_MASK; 3538 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3539 3540 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls); 3541 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> 3542 CHA_ID_LS_DES_SHIFT; 3543 aes_inst = cha_inst & CHA_ID_LS_AES_MASK; 3544 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3545 arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >> 3546 CHA_ID_LS_ARC4_SHIFT; 3547 ccha_inst = 0; 3548 ptha_inst = 0; 3549 3550 aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) & 3551 CHA_ID_LS_AES_MASK; 3552 gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8); 3553 } else { 3554 u32 aesa, mdha; 3555 3556 aesa = rd_reg32(&priv->ctrl->vreg.aesa); 3557 mdha = rd_reg32(&priv->ctrl->vreg.mdha); 3558 3559 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT; 3560 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT; 3561 3562 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK; 3563 aes_inst = aesa & CHA_VER_NUM_MASK; 3564 md_inst = mdha & CHA_VER_NUM_MASK; 3565 ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK; 3566 ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK; 3567 arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK; 3568 3569 gcm_support = aesa & CHA_VER_MISC_AES_GCM; 3570 } 3571 3572 /* If MD is present, limit digest size based on LP256 */ 3573 if (md_inst && md_vid == CHA_VER_VID_MD_LP256) 3574 md_limit = SHA256_DIGEST_SIZE; 3575 3576 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3577 struct caam_skcipher_alg *t_alg = driver_algs + i; 3578 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK; 3579 3580 /* Skip DES algorithms if not supported by device */ 3581 if (!des_inst && 3582 ((alg_sel == OP_ALG_ALGSEL_3DES) || 3583 (alg_sel == OP_ALG_ALGSEL_DES))) 3584 continue; 3585 3586 /* Skip 
AES algorithms if not supported by device */ 3587 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES)) 3588 continue; 3589 3590 /* Skip ARC4 algorithms if not supported by device */ 3591 if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4) 3592 continue; 3593 3594 /* 3595 * Check support for AES modes not available 3596 * on LP devices. 3597 */ 3598 if (aes_vid == CHA_VER_VID_AES_LP && 3599 (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) == 3600 OP_ALG_AAI_XTS) 3601 continue; 3602 3603 caam_skcipher_alg_init(t_alg); 3604 3605 err = crypto_register_skcipher(&t_alg->skcipher); 3606 if (err) { 3607 pr_warn("%s alg registration failed\n", 3608 t_alg->skcipher.base.cra_driver_name); 3609 continue; 3610 } 3611 3612 t_alg->registered = true; 3613 registered = true; 3614 } 3615 3616 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3617 struct caam_aead_alg *t_alg = driver_aeads + i; 3618 u32 c1_alg_sel = t_alg->caam.class1_alg_type & 3619 OP_ALG_ALGSEL_MASK; 3620 u32 c2_alg_sel = t_alg->caam.class2_alg_type & 3621 OP_ALG_ALGSEL_MASK; 3622 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; 3623 3624 /* Skip DES algorithms if not supported by device */ 3625 if (!des_inst && 3626 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) || 3627 (c1_alg_sel == OP_ALG_ALGSEL_DES))) 3628 continue; 3629 3630 /* Skip AES algorithms if not supported by device */ 3631 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES)) 3632 continue; 3633 3634 /* Skip CHACHA20 algorithms if not supported by device */ 3635 if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst) 3636 continue; 3637 3638 /* Skip POLY1305 algorithms if not supported by device */ 3639 if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst) 3640 continue; 3641 3642 /* Skip GCM algorithms if not supported by device */ 3643 if (c1_alg_sel == OP_ALG_ALGSEL_AES && 3644 alg_aai == OP_ALG_AAI_GCM && !gcm_support) 3645 continue; 3646 3647 /* 3648 * Skip algorithms requiring message digests 3649 * if MD or MD size is not supported by device. 
3650 */ 3651 if (is_mdha(c2_alg_sel) && 3652 (!md_inst || t_alg->aead.maxauthsize > md_limit)) 3653 continue; 3654 3655 caam_aead_alg_init(t_alg); 3656 3657 err = crypto_register_aead(&t_alg->aead); 3658 if (err) { 3659 pr_warn("%s alg registration failed\n", 3660 t_alg->aead.base.cra_driver_name); 3661 continue; 3662 } 3663 3664 t_alg->registered = true; 3665 registered = true; 3666 } 3667 3668 if (registered) 3669 pr_info("caam algorithms registered in /proc/crypto\n"); 3670 3671 out_put_dev: 3672 put_device(&pdev->dev); 3673 return err; 3674 } 3675 3676 module_init(caam_algapi_init); 3677 module_exit(caam_algapi_exit); 3678 3679 MODULE_LICENSE("GPL"); 3680 MODULE_DESCRIPTION("FSL CAAM support for crypto API"); 3681 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC"); 3682