1 // SPDX-License-Identifier: GPL-2.0+ 2 /* 3 * caam - Freescale FSL CAAM support for crypto API 4 * 5 * Copyright 2008-2011 Freescale Semiconductor, Inc. 6 * Copyright 2016-2019 NXP 7 * 8 * Based on talitos crypto API driver. 9 * 10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008): 11 * 12 * --------------- --------------- 13 * | JobDesc #1 |-------------------->| ShareDesc | 14 * | *(packet 1) | | (PDB) | 15 * --------------- |------------->| (hashKey) | 16 * . | | (cipherKey) | 17 * . | |-------->| (operation) | 18 * --------------- | | --------------- 19 * | JobDesc #2 |------| | 20 * | *(packet 2) | | 21 * --------------- | 22 * . | 23 * . | 24 * --------------- | 25 * | JobDesc #3 |------------ 26 * | *(packet 3) | 27 * --------------- 28 * 29 * The SharedDesc never changes for a connection unless rekeyed, but 30 * each packet will likely be in a different place. So all we need 31 * to know to process the packet is where the input is, where the 32 * output goes, and what context we want to process with. Context is 33 * in the SharedDesc, packet references in the JobDesc. 34 * 35 * So, a job desc looks like: 36 * 37 * --------------------- 38 * | Header | 39 * | ShareDesc Pointer | 40 * | SEQ_OUT_PTR | 41 * | (output buffer) | 42 * | (output length) | 43 * | SEQ_IN_PTR | 44 * | (input buffer) | 45 * | (input length) | 46 * --------------------- 47 */ 48 49 #include "compat.h" 50 51 #include "regs.h" 52 #include "intern.h" 53 #include "desc_constr.h" 54 #include "jr.h" 55 #include "error.h" 56 #include "sg_sw_sec4.h" 57 #include "key_gen.h" 58 #include "caamalg_desc.h" 59 60 /* 61 * crypto alg 62 */ 63 #define CAAM_CRA_PRIORITY 3000 64 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */ 65 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \ 66 CTR_RFC3686_NONCE_SIZE + \ 67 SHA512_DIGEST_SIZE * 2) 68 69 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2) 70 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 71 CAAM_CMD_SZ * 4) 72 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 73 CAAM_CMD_SZ * 5) 74 75 #define CHACHAPOLY_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6) 76 77 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN) 78 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ) 79 80 #ifdef DEBUG 81 /* for print_hex_dumps with line references */ 82 #define debug(format, arg...) printk(format, arg) 83 #else 84 #define debug(format, arg...) 85 #endif 86 87 struct caam_alg_entry { 88 int class1_alg_type; 89 int class2_alg_type; 90 bool rfc3686; 91 bool geniv; 92 }; 93 94 struct caam_aead_alg { 95 struct aead_alg aead; 96 struct caam_alg_entry caam; 97 bool registered; 98 }; 99 100 struct caam_skcipher_alg { 101 struct skcipher_alg skcipher; 102 struct caam_alg_entry caam; 103 bool registered; 104 }; 105 106 /* 107 * per-session context 108 */ 109 struct caam_ctx { 110 u32 sh_desc_enc[DESC_MAX_USED_LEN]; 111 u32 sh_desc_dec[DESC_MAX_USED_LEN]; 112 u8 key[CAAM_MAX_KEY_SIZE]; 113 dma_addr_t sh_desc_enc_dma; 114 dma_addr_t sh_desc_dec_dma; 115 dma_addr_t key_dma; 116 enum dma_data_direction dir; 117 struct device *jrdev; 118 struct alginfo adata; 119 struct alginfo cdata; 120 unsigned int authsize; 121 }; 122 123 static int aead_null_set_sh_desc(struct crypto_aead *aead) 124 { 125 struct caam_ctx *ctx = crypto_aead_ctx(aead); 126 struct device *jrdev = ctx->jrdev; 127 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 128 u32 *desc; 129 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN - 130 ctx->adata.keylen_pad; 131 132 /* 133 * Job Descriptor and Shared Descriptors 134 * must all fit into the 64-word Descriptor h/w Buffer 135 */ 136 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) { 137 ctx->adata.key_inline = true; 138 ctx->adata.key_virt = ctx->key; 139 } else { 140 ctx->adata.key_inline = false; 141 ctx->adata.key_dma = ctx->key_dma; 142 } 143 144 /* aead_encrypt shared descriptor */ 145 desc = ctx->sh_desc_enc; 146 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize, 147 ctrlpriv->era); 148 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 149 desc_bytes(desc), ctx->dir); 150 151 /* 152 * Job Descriptor and Shared Descriptors 153 * must all fit into the 64-word Descriptor h/w Buffer 154 */ 155 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) { 156 ctx->adata.key_inline = true; 157 ctx->adata.key_virt = ctx->key; 158 } else { 159 ctx->adata.key_inline = false; 160 ctx->adata.key_dma = ctx->key_dma; 161 } 162 163 /* aead_decrypt shared descriptor */ 164 desc = ctx->sh_desc_dec; 165 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize, 166 ctrlpriv->era); 167 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 168 desc_bytes(desc), ctx->dir); 169 170 return 0; 171 } 172 173 static int aead_set_sh_desc(struct crypto_aead *aead) 174 { 175 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 176 struct caam_aead_alg, aead); 177 unsigned int ivsize = crypto_aead_ivsize(aead); 178 struct caam_ctx *ctx = crypto_aead_ctx(aead); 179 struct device *jrdev = ctx->jrdev; 180 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 181 u32 ctx1_iv_off = 0; 182 u32 *desc, *nonce = NULL; 183 u32 inl_mask; 184 unsigned int data_len[2]; 185 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 186 OP_ALG_AAI_CTR_MOD128); 187 const bool is_rfc3686 = alg->caam.rfc3686; 188 189 if (!ctx->authsize) 190 return 0; 191 192 /* NULL encryption / decryption */ 193 if (!ctx->cdata.keylen) 194 return aead_null_set_sh_desc(aead); 195 196 /* 197 * AES-CTR needs to load IV in CONTEXT1 reg 198 * at an offset of 128bits (16bytes) 199 * CONTEXT1[255:128] = IV 200 */ 201 if (ctr_mode) 202 ctx1_iv_off = 16; 203 204 /* 205 * RFC3686 specific: 206 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 207 */ 208 if (is_rfc3686) { 209 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 210 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad + 211 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE); 212 } 213 214 data_len[0] = ctx->adata.keylen_pad; 215 data_len[1] = ctx->cdata.keylen; 216 217 if (alg->caam.geniv) 218 goto skip_enc; 219 220 /* 221 * Job Descriptor and Shared Descriptors 222 * must all fit into the 64-word Descriptor h/w Buffer 223 */ 224 if (desc_inline_query(DESC_AEAD_ENC_LEN + 225 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 226 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 227 ARRAY_SIZE(data_len)) < 0) 228 return -EINVAL; 229 230 if (inl_mask & 1) 231 ctx->adata.key_virt = ctx->key; 232 else 233 ctx->adata.key_dma = ctx->key_dma; 234 235 if (inl_mask & 2) 236 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 237 else 238 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 239 240 ctx->adata.key_inline = !!(inl_mask & 1); 241 ctx->cdata.key_inline = !!(inl_mask & 2); 242 243 /* aead_encrypt shared descriptor */ 244 desc = ctx->sh_desc_enc; 245 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize, 246 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off, 247 false, ctrlpriv->era); 248 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 249 desc_bytes(desc), ctx->dir); 250 251 skip_enc: 252 /* 253 * Job Descriptor and Shared Descriptors 254 * must all fit into the 64-word Descriptor h/w Buffer 255 */ 256 if (desc_inline_query(DESC_AEAD_DEC_LEN + 257 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 258 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 259 ARRAY_SIZE(data_len)) < 0) 260 return -EINVAL; 261 262 if (inl_mask & 1) 263 ctx->adata.key_virt = ctx->key; 264 else 265 ctx->adata.key_dma = ctx->key_dma; 266 267 if (inl_mask & 2) 268 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 269 else 270 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 271 272 ctx->adata.key_inline = !!(inl_mask & 1); 273 ctx->cdata.key_inline = !!(inl_mask & 2); 274 275 /* aead_decrypt shared descriptor */ 276 desc = ctx->sh_desc_dec; 277 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize, 278 ctx->authsize, alg->caam.geniv, is_rfc3686, 279 nonce, ctx1_iv_off, false, ctrlpriv->era); 280 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 281 desc_bytes(desc), ctx->dir); 282 283 if (!alg->caam.geniv) 284 goto skip_givenc; 285 286 /* 287 * Job Descriptor and Shared Descriptors 288 * must all fit into the 64-word Descriptor h/w Buffer 289 */ 290 if (desc_inline_query(DESC_AEAD_GIVENC_LEN + 291 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 292 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 293 ARRAY_SIZE(data_len)) < 0) 294 return -EINVAL; 295 296 if (inl_mask & 1) 297 ctx->adata.key_virt = ctx->key; 298 else 299 ctx->adata.key_dma = ctx->key_dma; 300 301 if (inl_mask & 2) 302 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 303 else 304 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 305 306 ctx->adata.key_inline = !!(inl_mask & 1); 307 ctx->cdata.key_inline = !!(inl_mask & 2); 308 309 /* aead_givencrypt shared descriptor */ 310 desc = ctx->sh_desc_enc; 311 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize, 312 ctx->authsize, is_rfc3686, nonce, 313 ctx1_iv_off, false, ctrlpriv->era); 314 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 315 desc_bytes(desc), ctx->dir); 316 317 skip_givenc: 318 return 0; 319 } 320 321 static int aead_setauthsize(struct crypto_aead *authenc, 322 unsigned int authsize) 323 { 324 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 325 326 ctx->authsize = authsize; 327 aead_set_sh_desc(authenc); 328 329 return 0; 330 } 331 332 static int gcm_set_sh_desc(struct crypto_aead *aead) 333 { 334 struct caam_ctx *ctx = crypto_aead_ctx(aead); 335 struct device *jrdev = ctx->jrdev; 336 unsigned int ivsize = crypto_aead_ivsize(aead); 337 u32 *desc; 338 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 339 ctx->cdata.keylen; 340 341 if (!ctx->cdata.keylen || !ctx->authsize) 342 return 0; 343 344 /* 345 * AES GCM encrypt shared descriptor 346 * Job Descriptor and Shared Descriptor 347 * must fit into the 64-word Descriptor h/w Buffer 348 */ 349 if (rem_bytes >= DESC_GCM_ENC_LEN) { 350 ctx->cdata.key_inline = true; 351 ctx->cdata.key_virt = ctx->key; 352 } else { 353 ctx->cdata.key_inline = false; 354 ctx->cdata.key_dma = ctx->key_dma; 355 } 356 357 desc = ctx->sh_desc_enc; 358 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false); 359 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 360 desc_bytes(desc), ctx->dir); 361 362 /* 363 * Job Descriptor and Shared Descriptors 364 * must all fit into the 64-word Descriptor h/w Buffer 365 */ 366 if (rem_bytes >= DESC_GCM_DEC_LEN) { 367 ctx->cdata.key_inline = true; 368 ctx->cdata.key_virt = ctx->key; 369 } else { 370 ctx->cdata.key_inline = false; 371 ctx->cdata.key_dma = ctx->key_dma; 372 } 373 374 desc = ctx->sh_desc_dec; 375 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false); 376 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 377 desc_bytes(desc), ctx->dir); 378 379 return 0; 380 } 381 382 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize) 383 { 384 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 385 386 ctx->authsize = authsize; 387 gcm_set_sh_desc(authenc); 388 389 return 0; 390 } 391 392 static int rfc4106_set_sh_desc(struct crypto_aead *aead) 393 { 394 struct caam_ctx *ctx = crypto_aead_ctx(aead); 395 struct device *jrdev = ctx->jrdev; 396 unsigned int ivsize = crypto_aead_ivsize(aead); 397 u32 *desc; 398 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 399 ctx->cdata.keylen; 400 401 if (!ctx->cdata.keylen || !ctx->authsize) 402 return 0; 403 404 /* 405 * RFC4106 encrypt shared descriptor 406 * Job Descriptor and Shared Descriptor 407 * must fit into the 64-word Descriptor h/w Buffer 408 */ 409 if (rem_bytes >= DESC_RFC4106_ENC_LEN) { 410 ctx->cdata.key_inline = true; 411 ctx->cdata.key_virt = ctx->key; 412 } else { 413 ctx->cdata.key_inline = false; 414 ctx->cdata.key_dma = ctx->key_dma; 415 } 416 417 desc = ctx->sh_desc_enc; 418 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize, 419 false); 420 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 421 desc_bytes(desc), ctx->dir); 422 423 /* 424 * Job Descriptor and Shared Descriptors 425 * must all fit into the 64-word Descriptor h/w Buffer 426 */ 427 if (rem_bytes >= DESC_RFC4106_DEC_LEN) { 428 ctx->cdata.key_inline = true; 429 ctx->cdata.key_virt = ctx->key; 430 } else { 431 ctx->cdata.key_inline = false; 432 ctx->cdata.key_dma = ctx->key_dma; 433 } 434 435 desc = ctx->sh_desc_dec; 436 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 437 false); 438 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 439 desc_bytes(desc), ctx->dir); 440 441 return 0; 442 } 443 444 static int rfc4106_setauthsize(struct crypto_aead *authenc, 445 unsigned int authsize) 446 { 447 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 448 449 ctx->authsize = authsize; 450 rfc4106_set_sh_desc(authenc); 451 452 return 0; 453 } 454 455 static int rfc4543_set_sh_desc(struct crypto_aead *aead) 456 { 457 struct caam_ctx *ctx = crypto_aead_ctx(aead); 458 struct device *jrdev = ctx->jrdev; 459 unsigned int ivsize = crypto_aead_ivsize(aead); 460 u32 *desc; 461 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 462 ctx->cdata.keylen; 463 464 if (!ctx->cdata.keylen || !ctx->authsize) 465 return 0; 466 467 /* 468 * RFC4543 encrypt shared descriptor 469 * Job Descriptor and Shared Descriptor 470 * must fit into the 64-word Descriptor h/w Buffer 471 */ 472 if (rem_bytes >= DESC_RFC4543_ENC_LEN) { 473 ctx->cdata.key_inline = true; 474 ctx->cdata.key_virt = ctx->key; 475 } else { 476 ctx->cdata.key_inline = false; 477 ctx->cdata.key_dma = ctx->key_dma; 478 } 479 480 desc = ctx->sh_desc_enc; 481 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize, 482 false); 483 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 484 desc_bytes(desc), ctx->dir); 485 486 /* 487 * Job Descriptor and Shared Descriptors 488 * must all fit into the 64-word Descriptor h/w Buffer 489 */ 490 if (rem_bytes >= DESC_RFC4543_DEC_LEN) { 491 ctx->cdata.key_inline = true; 492 ctx->cdata.key_virt = ctx->key; 493 } else { 494 ctx->cdata.key_inline = false; 495 ctx->cdata.key_dma = ctx->key_dma; 496 } 497 498 desc = ctx->sh_desc_dec; 499 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 500 false); 501 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 502 desc_bytes(desc), ctx->dir); 503 504 return 0; 505 } 506 507 static int rfc4543_setauthsize(struct crypto_aead *authenc, 508 unsigned int authsize) 509 { 510 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 511 512 ctx->authsize = authsize; 513 rfc4543_set_sh_desc(authenc); 514 515 return 0; 516 } 517 518 static int chachapoly_set_sh_desc(struct crypto_aead *aead) 519 { 520 struct caam_ctx *ctx = crypto_aead_ctx(aead); 521 struct device *jrdev = ctx->jrdev; 522 unsigned int ivsize = crypto_aead_ivsize(aead); 523 u32 *desc; 524 525 if (!ctx->cdata.keylen || !ctx->authsize) 526 return 0; 527 528 desc = ctx->sh_desc_enc; 529 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, 530 ctx->authsize, true, false); 531 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 532 desc_bytes(desc), ctx->dir); 533 534 desc = ctx->sh_desc_dec; 535 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize, 536 ctx->authsize, false, false); 537 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 538 desc_bytes(desc), ctx->dir); 539 540 return 0; 541 } 542 543 static int chachapoly_setauthsize(struct crypto_aead *aead, 544 unsigned int authsize) 545 { 546 struct caam_ctx *ctx = crypto_aead_ctx(aead); 547 548 if (authsize != POLY1305_DIGEST_SIZE) 549 return -EINVAL; 550 551 ctx->authsize = authsize; 552 return chachapoly_set_sh_desc(aead); 553 } 554 555 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key, 556 unsigned int keylen) 557 { 558 struct caam_ctx *ctx = crypto_aead_ctx(aead); 559 unsigned int ivsize = crypto_aead_ivsize(aead); 560 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize; 561 562 if (keylen != CHACHA_KEY_SIZE + saltlen) { 563 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 564 return -EINVAL; 565 } 566 567 ctx->cdata.key_virt = key; 568 ctx->cdata.keylen = keylen - saltlen; 569 570 return chachapoly_set_sh_desc(aead); 571 } 572 573 static int aead_setkey(struct crypto_aead *aead, 574 const u8 *key, unsigned int keylen) 575 { 576 struct caam_ctx *ctx = crypto_aead_ctx(aead); 577 struct device *jrdev = ctx->jrdev; 578 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 579 struct crypto_authenc_keys keys; 580 int ret = 0; 581 582 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) 583 goto badkey; 584 585 #ifdef DEBUG 586 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n", 587 keys.authkeylen + keys.enckeylen, keys.enckeylen, 588 keys.authkeylen); 589 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 590 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 591 #endif 592 593 /* 594 * If DKP is supported, use it in the shared descriptor to generate 595 * the split key. 596 */ 597 if (ctrlpriv->era >= 6) { 598 ctx->adata.keylen = keys.authkeylen; 599 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype & 600 OP_ALG_ALGSEL_MASK); 601 602 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE) 603 goto badkey; 604 605 memcpy(ctx->key, keys.authkey, keys.authkeylen); 606 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, 607 keys.enckeylen); 608 dma_sync_single_for_device(jrdev, ctx->key_dma, 609 ctx->adata.keylen_pad + 610 keys.enckeylen, ctx->dir); 611 goto skip_split_key; 612 } 613 614 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, 615 keys.authkeylen, CAAM_MAX_KEY_SIZE - 616 keys.enckeylen); 617 if (ret) { 618 goto badkey; 619 } 620 621 /* postpend encryption key to auth split key */ 622 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen); 623 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + 624 keys.enckeylen, ctx->dir); 625 #ifdef DEBUG 626 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ", 627 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, 628 ctx->adata.keylen_pad + keys.enckeylen, 1); 629 #endif 630 631 skip_split_key: 632 ctx->cdata.keylen = keys.enckeylen; 633 memzero_explicit(&keys, sizeof(keys)); 634 return aead_set_sh_desc(aead); 635 badkey: 636 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 637 memzero_explicit(&keys, sizeof(keys)); 638 return -EINVAL; 639 } 640 641 static int gcm_setkey(struct crypto_aead *aead, 642 const u8 *key, unsigned int keylen) 643 { 644 struct caam_ctx *ctx = crypto_aead_ctx(aead); 645 struct device *jrdev = ctx->jrdev; 646 647 #ifdef DEBUG 648 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 649 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 650 #endif 651 652 memcpy(ctx->key, key, keylen); 653 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir); 654 ctx->cdata.keylen = keylen; 655 656 return gcm_set_sh_desc(aead); 657 } 658 659 static int rfc4106_setkey(struct crypto_aead *aead, 660 const u8 *key, unsigned int keylen) 661 { 662 struct caam_ctx *ctx = crypto_aead_ctx(aead); 663 struct device *jrdev = ctx->jrdev; 664 665 if (keylen < 4) 666 return -EINVAL; 667 668 #ifdef DEBUG 669 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 670 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 671 #endif 672 673 memcpy(ctx->key, key, keylen); 674 675 /* 676 * The last four bytes of the key material are used as the salt value 677 * in the nonce. Update the AES key length. 678 */ 679 ctx->cdata.keylen = keylen - 4; 680 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 681 ctx->dir); 682 return rfc4106_set_sh_desc(aead); 683 } 684 685 static int rfc4543_setkey(struct crypto_aead *aead, 686 const u8 *key, unsigned int keylen) 687 { 688 struct caam_ctx *ctx = crypto_aead_ctx(aead); 689 struct device *jrdev = ctx->jrdev; 690 691 if (keylen < 4) 692 return -EINVAL; 693 694 #ifdef DEBUG 695 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 696 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 697 #endif 698 699 memcpy(ctx->key, key, keylen); 700 701 /* 702 * The last four bytes of the key material are used as the salt value 703 * in the nonce. Update the AES key length. 704 */ 705 ctx->cdata.keylen = keylen - 4; 706 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 707 ctx->dir); 708 return rfc4543_set_sh_desc(aead); 709 } 710 711 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key, 712 unsigned int keylen) 713 { 714 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 715 struct caam_skcipher_alg *alg = 716 container_of(crypto_skcipher_alg(skcipher), typeof(*alg), 717 skcipher); 718 struct device *jrdev = ctx->jrdev; 719 unsigned int ivsize = crypto_skcipher_ivsize(skcipher); 720 u32 *desc; 721 u32 ctx1_iv_off = 0; 722 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 723 OP_ALG_AAI_CTR_MOD128); 724 const bool is_rfc3686 = alg->caam.rfc3686; 725 726 #ifdef DEBUG 727 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 728 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 729 #endif 730 /* 731 * AES-CTR needs to load IV in CONTEXT1 reg 732 * at an offset of 128bits (16bytes) 733 * CONTEXT1[255:128] = IV 734 */ 735 if (ctr_mode) 736 ctx1_iv_off = 16; 737 738 /* 739 * RFC3686 specific: 740 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER} 741 * | *key = {KEY, NONCE} 742 */ 743 if (is_rfc3686) { 744 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 745 keylen -= CTR_RFC3686_NONCE_SIZE; 746 } 747 748 ctx->cdata.keylen = keylen; 749 ctx->cdata.key_virt = key; 750 ctx->cdata.key_inline = true; 751 752 /* skcipher_encrypt shared descriptor */ 753 desc = ctx->sh_desc_enc; 754 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686, 755 ctx1_iv_off); 756 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 757 desc_bytes(desc), ctx->dir); 758 759 /* skcipher_decrypt shared descriptor */ 760 desc = ctx->sh_desc_dec; 761 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686, 762 ctx1_iv_off); 763 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 764 desc_bytes(desc), ctx->dir); 765 766 return 0; 767 } 768 769 static int des_skcipher_setkey(struct crypto_skcipher *skcipher, 770 const u8 *key, unsigned int keylen) 771 { 772 u32 tmp[DES3_EDE_EXPKEY_WORDS]; 773 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 774 775 if (keylen == DES3_EDE_KEY_SIZE && 776 __des3_ede_setkey(tmp, &tfm->crt_flags, key, DES3_EDE_KEY_SIZE)) { 777 return -EINVAL; 778 } 779 780 if (!des_ekey(tmp, key) && (crypto_skcipher_get_flags(skcipher) & 781 CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) { 782 crypto_skcipher_set_flags(skcipher, 783 CRYPTO_TFM_RES_WEAK_KEY); 784 return -EINVAL; 785 } 786 787 return skcipher_setkey(skcipher, key, keylen); 788 } 789 790 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key, 791 unsigned int keylen) 792 { 793 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 794 struct device *jrdev = ctx->jrdev; 795 u32 *desc; 796 797 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) { 798 crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_RES_BAD_KEY_LEN); 799 dev_err(jrdev, "key size mismatch\n"); 800 return -EINVAL; 801 } 802 803 ctx->cdata.keylen = keylen; 804 ctx->cdata.key_virt = key; 805 ctx->cdata.key_inline = true; 806 807 /* xts_skcipher_encrypt shared descriptor */ 808 desc = ctx->sh_desc_enc; 809 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata); 810 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 811 desc_bytes(desc), ctx->dir); 812 813 /* xts_skcipher_decrypt shared descriptor */ 814 desc = ctx->sh_desc_dec; 815 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata); 816 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 817 desc_bytes(desc), ctx->dir); 818 819 return 0; 820 } 821 822 /* 823 * aead_edesc - s/w-extended aead descriptor 824 * @src_nents: number of segments in input s/w scatterlist 825 * @dst_nents: number of segments in output s/w scatterlist 826 * @mapped_src_nents: number of segments in input h/w link table 827 * @mapped_dst_nents: number of segments in output h/w link table 828 * @sec4_sg_bytes: length of dma mapped sec4_sg space 829 * @sec4_sg_dma: bus physical mapped address of h/w link table 830 * @sec4_sg: pointer to h/w link table 831 * @hw_desc: the h/w job descriptor followed by any referenced link tables 832 */ 833 struct aead_edesc { 834 int src_nents; 835 int dst_nents; 836 int mapped_src_nents; 837 int mapped_dst_nents; 838 int sec4_sg_bytes; 839 dma_addr_t sec4_sg_dma; 840 struct sec4_sg_entry *sec4_sg; 841 u32 hw_desc[]; 842 }; 843 844 /* 845 * skcipher_edesc - s/w-extended skcipher descriptor 846 * @src_nents: number of segments in input s/w scatterlist 847 * @dst_nents: number of segments in output s/w scatterlist 848 * @mapped_src_nents: number of segments in input h/w link table 849 * @mapped_dst_nents: number of segments in output h/w link table 850 * @iv_dma: dma address of iv for checking continuity and link table 851 * @sec4_sg_bytes: length of dma mapped sec4_sg space 852 * @sec4_sg_dma: bus physical mapped address of h/w link table 853 * @sec4_sg: pointer to h/w link table 854 * @hw_desc: the h/w job descriptor followed by any referenced link tables 855 * and IV 856 */ 857 struct skcipher_edesc { 858 int src_nents; 859 int dst_nents; 860 int mapped_src_nents; 861 int mapped_dst_nents; 862 dma_addr_t iv_dma; 863 int sec4_sg_bytes; 864 dma_addr_t sec4_sg_dma; 865 struct sec4_sg_entry *sec4_sg; 866 u32 hw_desc[0]; 867 }; 868 869 static void caam_unmap(struct device *dev, struct scatterlist *src, 870 struct scatterlist *dst, int src_nents, 871 int dst_nents, 872 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma, 873 int sec4_sg_bytes) 874 { 875 if (dst != src) { 876 if (src_nents) 877 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); 878 if (dst_nents) 879 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); 880 } else { 881 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); 882 } 883 884 if (iv_dma) 885 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE); 886 if (sec4_sg_bytes) 887 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes, 888 DMA_TO_DEVICE); 889 } 890 891 static void aead_unmap(struct device *dev, 892 struct aead_edesc *edesc, 893 struct aead_request *req) 894 { 895 caam_unmap(dev, req->src, req->dst, 896 edesc->src_nents, edesc->dst_nents, 0, 0, 897 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 898 } 899 900 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc, 901 struct skcipher_request *req) 902 { 903 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 904 int ivsize = crypto_skcipher_ivsize(skcipher); 905 906 caam_unmap(dev, req->src, req->dst, 907 edesc->src_nents, edesc->dst_nents, 908 edesc->iv_dma, ivsize, 909 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 910 } 911 912 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 913 void *context) 914 { 915 struct aead_request *req = context; 916 struct aead_edesc *edesc; 917 918 #ifdef DEBUG 919 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 920 #endif 921 922 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 923 924 if (err) 925 caam_jr_strstatus(jrdev, err); 926 927 aead_unmap(jrdev, edesc, req); 928 929 kfree(edesc); 930 931 aead_request_complete(req, err); 932 } 933 934 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 935 void *context) 936 { 937 struct aead_request *req = context; 938 struct aead_edesc *edesc; 939 940 #ifdef DEBUG 941 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 942 #endif 943 944 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 945 946 if (err) 947 caam_jr_strstatus(jrdev, err); 948 949 aead_unmap(jrdev, edesc, req); 950 951 /* 952 * verify hw auth check passed else return -EBADMSG 953 */ 954 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK) 955 err = -EBADMSG; 956 957 kfree(edesc); 958 959 aead_request_complete(req, err); 960 } 961 962 static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 963 void *context) 964 { 965 struct skcipher_request *req = context; 966 struct skcipher_edesc *edesc; 967 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 968 int ivsize = crypto_skcipher_ivsize(skcipher); 969 970 #ifdef DEBUG 971 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 972 #endif 973 974 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]); 975 976 if (err) 977 caam_jr_strstatus(jrdev, err); 978 979 #ifdef DEBUG 980 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ", 981 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, 982 edesc->src_nents > 1 ? 100 : ivsize, 1); 983 #endif 984 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ", 985 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 986 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); 987 988 skcipher_unmap(jrdev, edesc, req); 989 990 /* 991 * The crypto API expects us to set the IV (req->iv) to the last 992 * ciphertext block. This is used e.g. by the CTS mode. 993 */ 994 if (ivsize) 995 scatterwalk_map_and_copy(req->iv, req->dst, req->cryptlen - 996 ivsize, ivsize, 0); 997 998 kfree(edesc); 999 1000 skcipher_request_complete(req, err); 1001 } 1002 1003 static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 1004 void *context) 1005 { 1006 struct skcipher_request *req = context; 1007 struct skcipher_edesc *edesc; 1008 #ifdef DEBUG 1009 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1010 int ivsize = crypto_skcipher_ivsize(skcipher); 1011 1012 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 1013 #endif 1014 1015 edesc = container_of(desc, struct skcipher_edesc, hw_desc[0]); 1016 if (err) 1017 caam_jr_strstatus(jrdev, err); 1018 1019 #ifdef DEBUG 1020 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ", 1021 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1); 1022 #endif 1023 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ", 1024 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 1025 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1); 1026 1027 skcipher_unmap(jrdev, edesc, req); 1028 kfree(edesc); 1029 1030 skcipher_request_complete(req, err); 1031 } 1032 1033 /* 1034 * Fill in aead job descriptor 1035 */ 1036 static void init_aead_job(struct aead_request *req, 1037 struct aead_edesc *edesc, 1038 bool all_contig, bool encrypt) 1039 { 1040 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1041 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1042 int authsize = ctx->authsize; 1043 u32 *desc = edesc->hw_desc; 1044 u32 out_options, in_options; 1045 dma_addr_t dst_dma, src_dma; 1046 int len, sec4_sg_index = 0; 1047 dma_addr_t ptr; 1048 u32 *sh_desc; 1049 1050 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 1051 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 1052 1053 len = desc_len(sh_desc); 1054 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1055 1056 if (all_contig) { 1057 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) : 1058 0; 1059 in_options = 0; 1060 } else { 1061 src_dma = edesc->sec4_sg_dma; 1062 sec4_sg_index += edesc->mapped_src_nents; 1063 in_options = LDST_SGF; 1064 } 1065 1066 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, 1067 in_options); 1068 1069 dst_dma = src_dma; 1070 out_options = in_options; 1071 1072 if (unlikely(req->src != req->dst)) { 1073 if (!edesc->mapped_dst_nents) { 1074 dst_dma = 0; 1075 } else if (edesc->mapped_dst_nents == 1) { 1076 dst_dma = sg_dma_address(req->dst); 1077 out_options = 0; 1078 } else { 1079 dst_dma = edesc->sec4_sg_dma + 1080 sec4_sg_index * 1081 sizeof(struct sec4_sg_entry); 1082 out_options = LDST_SGF; 1083 } 1084 } 1085 1086 if (encrypt) 1087 append_seq_out_ptr(desc, dst_dma, 1088 req->assoclen + req->cryptlen + authsize, 1089 out_options); 1090 else 1091 append_seq_out_ptr(desc, dst_dma, 1092 req->assoclen + req->cryptlen - authsize, 1093 out_options); 1094 } 1095 1096 static void init_gcm_job(struct aead_request *req, 1097 struct aead_edesc *edesc, 1098 bool all_contig, bool encrypt) 1099 { 1100 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1101 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1102 unsigned int ivsize = crypto_aead_ivsize(aead); 1103 u32 *desc = edesc->hw_desc; 1104 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE); 1105 unsigned int last; 1106 1107 init_aead_job(req, edesc, all_contig, encrypt); 1108 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1109 1110 /* BUG This should not be specific to generic GCM. */ 1111 last = 0; 1112 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen)) 1113 last = FIFOLD_TYPE_LAST1; 1114 1115 /* Read GCM IV */ 1116 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE | 1117 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last); 1118 /* Append Salt */ 1119 if (!generic_gcm) 1120 append_data(desc, ctx->key + ctx->cdata.keylen, 4); 1121 /* Append IV */ 1122 append_data(desc, req->iv, ivsize); 1123 /* End of blank commands */ 1124 } 1125 1126 static void init_chachapoly_job(struct aead_request *req, 1127 struct aead_edesc *edesc, bool all_contig, 1128 bool encrypt) 1129 { 1130 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1131 unsigned int ivsize = crypto_aead_ivsize(aead); 1132 unsigned int assoclen = req->assoclen; 1133 u32 *desc = edesc->hw_desc; 1134 u32 ctx_iv_off = 4; 1135 1136 init_aead_job(req, edesc, all_contig, encrypt); 1137 1138 if (ivsize != CHACHAPOLY_IV_SIZE) { 1139 /* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */ 1140 ctx_iv_off += 4; 1141 1142 /* 1143 * The associated data comes already with the IV but we need 1144 * to skip it when we authenticate or encrypt... 1145 */ 1146 assoclen -= ivsize; 1147 } 1148 1149 append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen); 1150 1151 /* 1152 * For IPsec load the IV further in the same register. 1153 * For RFC7539 simply load the 12 bytes nonce in a single operation 1154 */ 1155 append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB | 1156 LDST_SRCDST_BYTE_CONTEXT | 1157 ctx_iv_off << LDST_OFFSET_SHIFT); 1158 } 1159 1160 static void init_authenc_job(struct aead_request *req, 1161 struct aead_edesc *edesc, 1162 bool all_contig, bool encrypt) 1163 { 1164 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1165 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 1166 struct caam_aead_alg, aead); 1167 unsigned int ivsize = crypto_aead_ivsize(aead); 1168 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1169 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent); 1170 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 1171 OP_ALG_AAI_CTR_MOD128); 1172 const bool is_rfc3686 = alg->caam.rfc3686; 1173 u32 *desc = edesc->hw_desc; 1174 u32 ivoffset = 0; 1175 1176 /* 1177 * AES-CTR needs to load IV in CONTEXT1 reg 1178 * at an offset of 128bits (16bytes) 1179 * CONTEXT1[255:128] = IV 1180 */ 1181 if (ctr_mode) 1182 ivoffset = 16; 1183 1184 /* 1185 * RFC3686 specific: 1186 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 1187 */ 1188 if (is_rfc3686) 1189 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE; 1190 1191 init_aead_job(req, edesc, all_contig, encrypt); 1192 1193 /* 1194 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports 1195 * having DPOVRD as destination. 1196 */ 1197 if (ctrlpriv->era < 3) 1198 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1199 else 1200 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen); 1201 1202 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv)) 1203 append_load_as_imm(desc, req->iv, ivsize, 1204 LDST_CLASS_1_CCB | 1205 LDST_SRCDST_BYTE_CONTEXT | 1206 (ivoffset << LDST_OFFSET_SHIFT)); 1207 } 1208 1209 /* 1210 * Fill in skcipher job descriptor 1211 */ 1212 static void init_skcipher_job(struct skcipher_request *req, 1213 struct skcipher_edesc *edesc, 1214 const bool encrypt) 1215 { 1216 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1217 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1218 int ivsize = crypto_skcipher_ivsize(skcipher); 1219 u32 *desc = edesc->hw_desc; 1220 u32 *sh_desc; 1221 u32 in_options = 0, out_options = 0; 1222 dma_addr_t src_dma, dst_dma, ptr; 1223 int len, sec4_sg_index = 0; 1224 1225 #ifdef DEBUG 1226 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ", 1227 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1); 1228 pr_err("asked=%d, cryptlen%d\n", 1229 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen); 1230 #endif 1231 caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ", 1232 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1233 edesc->src_nents > 1 ? 100 : req->cryptlen, 1); 1234 1235 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 1236 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 1237 1238 len = desc_len(sh_desc); 1239 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1240 1241 if (ivsize || edesc->mapped_src_nents > 1) { 1242 src_dma = edesc->sec4_sg_dma; 1243 sec4_sg_index = edesc->mapped_src_nents + !!ivsize; 1244 in_options = LDST_SGF; 1245 } else { 1246 src_dma = sg_dma_address(req->src); 1247 } 1248 1249 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options); 1250 1251 if (likely(req->src == req->dst)) { 1252 dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry); 1253 out_options = in_options; 1254 } else if (edesc->mapped_dst_nents == 1) { 1255 dst_dma = sg_dma_address(req->dst); 1256 } else { 1257 dst_dma = edesc->sec4_sg_dma + sec4_sg_index * 1258 sizeof(struct sec4_sg_entry); 1259 out_options = LDST_SGF; 1260 } 1261 1262 append_seq_out_ptr(desc, dst_dma, req->cryptlen, out_options); 1263 } 1264 1265 /* 1266 * allocate and map the aead extended descriptor 1267 */ 1268 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req, 1269 int desc_bytes, bool *all_contig_ptr, 1270 bool encrypt) 1271 { 1272 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1273 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1274 struct device *jrdev = ctx->jrdev; 1275 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1276 GFP_KERNEL : GFP_ATOMIC; 1277 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1278 struct aead_edesc *edesc; 1279 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes; 1280 unsigned int authsize = ctx->authsize; 1281 1282 if (unlikely(req->dst != req->src)) { 1283 src_nents = sg_nents_for_len(req->src, req->assoclen + 1284 req->cryptlen); 1285 if (unlikely(src_nents < 0)) { 1286 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1287 req->assoclen + req->cryptlen); 1288 return ERR_PTR(src_nents); 1289 } 1290 1291 dst_nents = sg_nents_for_len(req->dst, req->assoclen + 1292 req->cryptlen + 1293 (encrypt ? authsize : 1294 (-authsize))); 1295 if (unlikely(dst_nents < 0)) { 1296 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1297 req->assoclen + req->cryptlen + 1298 (encrypt ? authsize : (-authsize))); 1299 return ERR_PTR(dst_nents); 1300 } 1301 } else { 1302 src_nents = sg_nents_for_len(req->src, req->assoclen + 1303 req->cryptlen + 1304 (encrypt ? authsize : 0)); 1305 if (unlikely(src_nents < 0)) { 1306 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1307 req->assoclen + req->cryptlen + 1308 (encrypt ? authsize : 0)); 1309 return ERR_PTR(src_nents); 1310 } 1311 } 1312 1313 if (likely(req->src == req->dst)) { 1314 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1315 DMA_BIDIRECTIONAL); 1316 if (unlikely(!mapped_src_nents)) { 1317 dev_err(jrdev, "unable to map source\n"); 1318 return ERR_PTR(-ENOMEM); 1319 } 1320 } else { 1321 /* Cover also the case of null (zero length) input data */ 1322 if (src_nents) { 1323 mapped_src_nents = dma_map_sg(jrdev, req->src, 1324 src_nents, DMA_TO_DEVICE); 1325 if (unlikely(!mapped_src_nents)) { 1326 dev_err(jrdev, "unable to map source\n"); 1327 return ERR_PTR(-ENOMEM); 1328 } 1329 } else { 1330 mapped_src_nents = 0; 1331 } 1332 1333 /* Cover also the case of null (zero length) output data */ 1334 if (dst_nents) { 1335 mapped_dst_nents = dma_map_sg(jrdev, req->dst, 1336 dst_nents, 1337 DMA_FROM_DEVICE); 1338 if (unlikely(!mapped_dst_nents)) { 1339 dev_err(jrdev, "unable to map destination\n"); 1340 dma_unmap_sg(jrdev, req->src, src_nents, 1341 DMA_TO_DEVICE); 1342 return ERR_PTR(-ENOMEM); 1343 } 1344 } else { 1345 mapped_dst_nents = 0; 1346 } 1347 } 1348 1349 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0; 1350 sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0; 1351 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry); 1352 1353 /* allocate space for base edesc and hw desc commands, link tables */ 1354 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1355 GFP_DMA | flags); 1356 if (!edesc) { 1357 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1358 0, 0, 0); 1359 return ERR_PTR(-ENOMEM); 1360 } 1361 1362 edesc->src_nents = src_nents; 1363 edesc->dst_nents = dst_nents; 1364 edesc->mapped_src_nents = mapped_src_nents; 1365 edesc->mapped_dst_nents = mapped_dst_nents; 1366 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + 1367 desc_bytes; 1368 *all_contig_ptr = !(mapped_src_nents > 1); 1369 1370 sec4_sg_index = 0; 1371 if (mapped_src_nents > 1) { 1372 sg_to_sec4_sg_last(req->src, mapped_src_nents, 1373 edesc->sec4_sg + sec4_sg_index, 0); 1374 sec4_sg_index += mapped_src_nents; 1375 } 1376 if (mapped_dst_nents > 1) { 1377 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1378 edesc->sec4_sg + sec4_sg_index, 0); 1379 } 1380 1381 if (!sec4_sg_bytes) 1382 return edesc; 1383 1384 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1385 sec4_sg_bytes, DMA_TO_DEVICE); 1386 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1387 dev_err(jrdev, "unable to map S/G table\n"); 1388 aead_unmap(jrdev, edesc, req); 1389 kfree(edesc); 1390 return ERR_PTR(-ENOMEM); 1391 } 1392 1393 edesc->sec4_sg_bytes = sec4_sg_bytes; 1394 1395 return edesc; 1396 } 1397 1398 static int gcm_encrypt(struct aead_request *req) 1399 { 1400 struct aead_edesc *edesc; 1401 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1402 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1403 struct device *jrdev = ctx->jrdev; 1404 bool all_contig; 1405 u32 *desc; 1406 int ret = 0; 1407 1408 /* allocate extended descriptor */ 1409 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true); 1410 if (IS_ERR(edesc)) 1411 return PTR_ERR(edesc); 1412 1413 /* Create and submit job descriptor */ 1414 init_gcm_job(req, edesc, all_contig, true); 1415 #ifdef DEBUG 1416 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1417 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1418 desc_bytes(edesc->hw_desc), 1); 1419 #endif 1420 1421 desc = edesc->hw_desc; 1422 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1423 if (!ret) { 1424 ret = -EINPROGRESS; 1425 } else { 1426 aead_unmap(jrdev, edesc, req); 1427 kfree(edesc); 1428 } 1429 1430 return ret; 1431 } 1432 1433 static int chachapoly_encrypt(struct aead_request *req) 1434 { 1435 struct aead_edesc *edesc; 1436 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1437 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1438 struct device *jrdev = ctx->jrdev; 1439 bool all_contig; 1440 u32 *desc; 1441 int ret; 1442 1443 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig, 1444 true); 1445 if (IS_ERR(edesc)) 1446 return PTR_ERR(edesc); 1447 1448 desc = edesc->hw_desc; 1449 1450 init_chachapoly_job(req, edesc, all_contig, true); 1451 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ", 1452 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1453 1); 1454 1455 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1456 if (!ret) { 1457 ret = -EINPROGRESS; 1458 } else { 1459 aead_unmap(jrdev, edesc, req); 1460 kfree(edesc); 1461 } 1462 1463 return ret; 1464 } 1465 1466 static int chachapoly_decrypt(struct aead_request *req) 1467 { 1468 struct aead_edesc *edesc; 1469 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1470 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1471 struct device *jrdev = ctx->jrdev; 1472 bool all_contig; 1473 u32 *desc; 1474 int ret; 1475 1476 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig, 1477 false); 1478 if (IS_ERR(edesc)) 1479 return PTR_ERR(edesc); 1480 1481 desc = edesc->hw_desc; 1482 1483 init_chachapoly_job(req, edesc, all_contig, false); 1484 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ", 1485 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1486 1); 1487 1488 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1489 if (!ret) { 1490 ret = -EINPROGRESS; 1491 } else { 1492 aead_unmap(jrdev, edesc, req); 1493 kfree(edesc); 1494 } 1495 1496 return ret; 1497 } 1498 1499 static int ipsec_gcm_encrypt(struct aead_request *req) 1500 { 1501 if (req->assoclen < 8) 1502 return -EINVAL; 1503 1504 return gcm_encrypt(req); 1505 } 1506 1507 static int aead_encrypt(struct aead_request *req) 1508 { 1509 struct aead_edesc *edesc; 1510 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1511 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1512 struct device *jrdev = ctx->jrdev; 1513 bool all_contig; 1514 u32 *desc; 1515 int ret = 0; 1516 1517 /* allocate extended descriptor */ 1518 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1519 &all_contig, true); 1520 if (IS_ERR(edesc)) 1521 return PTR_ERR(edesc); 1522 1523 /* Create and submit job descriptor */ 1524 init_authenc_job(req, edesc, all_contig, true); 1525 #ifdef DEBUG 1526 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1527 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1528 desc_bytes(edesc->hw_desc), 1); 1529 #endif 1530 1531 desc = edesc->hw_desc; 1532 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1533 if (!ret) { 1534 ret = -EINPROGRESS; 1535 } else { 1536 aead_unmap(jrdev, edesc, req); 1537 kfree(edesc); 1538 } 1539 1540 return ret; 1541 } 1542 1543 static int gcm_decrypt(struct aead_request *req) 1544 { 1545 struct aead_edesc *edesc; 1546 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1547 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1548 struct device *jrdev = ctx->jrdev; 1549 bool all_contig; 1550 u32 *desc; 1551 int ret = 0; 1552 1553 /* allocate extended descriptor */ 1554 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false); 1555 if (IS_ERR(edesc)) 1556 return PTR_ERR(edesc); 1557 1558 /* Create and submit job descriptor*/ 1559 init_gcm_job(req, edesc, all_contig, false); 1560 #ifdef DEBUG 1561 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1562 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1563 desc_bytes(edesc->hw_desc), 1); 1564 #endif 1565 1566 desc = edesc->hw_desc; 1567 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1568 if (!ret) { 1569 ret = -EINPROGRESS; 1570 } else { 1571 aead_unmap(jrdev, edesc, req); 1572 kfree(edesc); 1573 } 1574 1575 return ret; 1576 } 1577 1578 static int ipsec_gcm_decrypt(struct aead_request *req) 1579 { 1580 if (req->assoclen < 8) 1581 return -EINVAL; 1582 1583 return gcm_decrypt(req); 1584 } 1585 1586 static int aead_decrypt(struct aead_request *req) 1587 { 1588 struct aead_edesc *edesc; 1589 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1590 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1591 struct device *jrdev = ctx->jrdev; 1592 bool all_contig; 1593 u32 *desc; 1594 int ret = 0; 1595 1596 caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ", 1597 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1598 req->assoclen + req->cryptlen, 1); 1599 1600 /* allocate extended descriptor */ 1601 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1602 &all_contig, false); 1603 if (IS_ERR(edesc)) 1604 return PTR_ERR(edesc); 1605 1606 /* Create and submit job descriptor*/ 1607 init_authenc_job(req, edesc, all_contig, false); 1608 #ifdef DEBUG 1609 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1610 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1611 desc_bytes(edesc->hw_desc), 1); 1612 #endif 1613 1614 desc = edesc->hw_desc; 1615 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1616 if (!ret) { 1617 ret = -EINPROGRESS; 1618 } else { 1619 aead_unmap(jrdev, edesc, req); 1620 kfree(edesc); 1621 } 1622 1623 return ret; 1624 } 1625 1626 /* 1627 * allocate and map the skcipher extended descriptor for skcipher 1628 */ 1629 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req, 1630 int desc_bytes) 1631 { 1632 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1633 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1634 struct device *jrdev = ctx->jrdev; 1635 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1636 GFP_KERNEL : GFP_ATOMIC; 1637 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1638 struct skcipher_edesc *edesc; 1639 dma_addr_t iv_dma = 0; 1640 u8 *iv; 1641 int ivsize = crypto_skcipher_ivsize(skcipher); 1642 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1643 1644 src_nents = sg_nents_for_len(req->src, req->cryptlen); 1645 if (unlikely(src_nents < 0)) { 1646 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1647 req->cryptlen); 1648 return ERR_PTR(src_nents); 1649 } 1650 1651 if (req->dst != req->src) { 1652 dst_nents = sg_nents_for_len(req->dst, req->cryptlen); 1653 if (unlikely(dst_nents < 0)) { 1654 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1655 req->cryptlen); 1656 return ERR_PTR(dst_nents); 1657 } 1658 } 1659 1660 if (likely(req->src == req->dst)) { 1661 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1662 DMA_BIDIRECTIONAL); 1663 if (unlikely(!mapped_src_nents)) { 1664 dev_err(jrdev, "unable to map source\n"); 1665 return ERR_PTR(-ENOMEM); 1666 } 1667 } else { 1668 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1669 DMA_TO_DEVICE); 1670 if (unlikely(!mapped_src_nents)) { 1671 dev_err(jrdev, "unable to map source\n"); 1672 return ERR_PTR(-ENOMEM); 1673 } 1674 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1675 DMA_FROM_DEVICE); 1676 if (unlikely(!mapped_dst_nents)) { 1677 dev_err(jrdev, "unable to map destination\n"); 1678 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1679 return ERR_PTR(-ENOMEM); 1680 } 1681 } 1682 1683 if (!ivsize && mapped_src_nents == 1) 1684 sec4_sg_ents = 0; // no need for an input hw s/g table 1685 else 1686 sec4_sg_ents = mapped_src_nents + !!ivsize; 1687 dst_sg_idx = sec4_sg_ents; 1688 sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0; 1689 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry); 1690 1691 /* 1692 * allocate space for base edesc and hw desc commands, link tables, IV 1693 */ 1694 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize, 1695 GFP_DMA | flags); 1696 if (!edesc) { 1697 dev_err(jrdev, "could not allocate extended descriptor\n"); 1698 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1699 0, 0, 0); 1700 return ERR_PTR(-ENOMEM); 1701 } 1702 1703 edesc->src_nents = src_nents; 1704 edesc->dst_nents = dst_nents; 1705 edesc->mapped_src_nents = mapped_src_nents; 1706 edesc->mapped_dst_nents = mapped_dst_nents; 1707 edesc->sec4_sg_bytes = sec4_sg_bytes; 1708 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc + 1709 desc_bytes); 1710 1711 /* Make sure IV is located in a DMAable area */ 1712 if (ivsize) { 1713 iv = (u8 *)edesc->hw_desc + desc_bytes + sec4_sg_bytes; 1714 memcpy(iv, req->iv, ivsize); 1715 1716 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_TO_DEVICE); 1717 if (dma_mapping_error(jrdev, iv_dma)) { 1718 dev_err(jrdev, "unable to map IV\n"); 1719 caam_unmap(jrdev, req->src, req->dst, src_nents, 1720 dst_nents, 0, 0, 0, 0); 1721 kfree(edesc); 1722 return ERR_PTR(-ENOMEM); 1723 } 1724 1725 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); 1726 } 1727 if (dst_sg_idx) 1728 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg + 1729 !!ivsize, 0); 1730 1731 if (mapped_dst_nents > 1) { 1732 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1733 edesc->sec4_sg + dst_sg_idx, 0); 1734 } 1735 1736 if (sec4_sg_bytes) { 1737 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1738 sec4_sg_bytes, 1739 DMA_TO_DEVICE); 1740 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1741 dev_err(jrdev, "unable to map S/G table\n"); 1742 caam_unmap(jrdev, req->src, req->dst, src_nents, 1743 dst_nents, iv_dma, ivsize, 0, 0); 1744 kfree(edesc); 1745 return ERR_PTR(-ENOMEM); 1746 } 1747 } 1748 1749 edesc->iv_dma = iv_dma; 1750 1751 #ifdef DEBUG 1752 print_hex_dump(KERN_ERR, "skcipher sec4_sg@" __stringify(__LINE__)": ", 1753 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, 1754 sec4_sg_bytes, 1); 1755 #endif 1756 1757 return edesc; 1758 } 1759 1760 static int skcipher_encrypt(struct skcipher_request *req) 1761 { 1762 struct skcipher_edesc *edesc; 1763 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1764 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1765 struct device *jrdev = ctx->jrdev; 1766 u32 *desc; 1767 int ret = 0; 1768 1769 /* allocate extended descriptor */ 1770 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ); 1771 if (IS_ERR(edesc)) 1772 return PTR_ERR(edesc); 1773 1774 /* Create and submit job descriptor*/ 1775 init_skcipher_job(req, edesc, true); 1776 #ifdef DEBUG 1777 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ", 1778 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1779 desc_bytes(edesc->hw_desc), 1); 1780 #endif 1781 desc = edesc->hw_desc; 1782 ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req); 1783 1784 if (!ret) { 1785 ret = -EINPROGRESS; 1786 } else { 1787 skcipher_unmap(jrdev, edesc, req); 1788 kfree(edesc); 1789 } 1790 1791 return ret; 1792 } 1793 1794 static int skcipher_decrypt(struct skcipher_request *req) 1795 { 1796 struct skcipher_edesc *edesc; 1797 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1798 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher); 1799 int ivsize = crypto_skcipher_ivsize(skcipher); 1800 struct device *jrdev = ctx->jrdev; 1801 u32 *desc; 1802 int ret = 0; 1803 1804 /* allocate extended descriptor */ 1805 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ); 1806 if (IS_ERR(edesc)) 1807 return PTR_ERR(edesc); 1808 1809 /* 1810 * The crypto API expects us to set the IV (req->iv) to the last 1811 * ciphertext block. 1812 */ 1813 if (ivsize) 1814 scatterwalk_map_and_copy(req->iv, req->src, req->cryptlen - 1815 ivsize, ivsize, 0); 1816 1817 /* Create and submit job descriptor*/ 1818 init_skcipher_job(req, edesc, false); 1819 desc = edesc->hw_desc; 1820 #ifdef DEBUG 1821 print_hex_dump(KERN_ERR, "skcipher jobdesc@" __stringify(__LINE__)": ", 1822 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1823 desc_bytes(edesc->hw_desc), 1); 1824 #endif 1825 1826 ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req); 1827 if (!ret) { 1828 ret = -EINPROGRESS; 1829 } else { 1830 skcipher_unmap(jrdev, edesc, req); 1831 kfree(edesc); 1832 } 1833 1834 return ret; 1835 } 1836 1837 static struct caam_skcipher_alg driver_algs[] = { 1838 { 1839 .skcipher = { 1840 .base = { 1841 .cra_name = "cbc(aes)", 1842 .cra_driver_name = "cbc-aes-caam", 1843 .cra_blocksize = AES_BLOCK_SIZE, 1844 }, 1845 .setkey = skcipher_setkey, 1846 .encrypt = skcipher_encrypt, 1847 .decrypt = skcipher_decrypt, 1848 .min_keysize = AES_MIN_KEY_SIZE, 1849 .max_keysize = AES_MAX_KEY_SIZE, 1850 .ivsize = AES_BLOCK_SIZE, 1851 }, 1852 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1853 }, 1854 { 1855 .skcipher = { 1856 .base = { 1857 .cra_name = "cbc(des3_ede)", 1858 .cra_driver_name = "cbc-3des-caam", 1859 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 1860 }, 1861 .setkey = des_skcipher_setkey, 1862 .encrypt = skcipher_encrypt, 1863 .decrypt = skcipher_decrypt, 1864 .min_keysize = DES3_EDE_KEY_SIZE, 1865 .max_keysize = DES3_EDE_KEY_SIZE, 1866 .ivsize = DES3_EDE_BLOCK_SIZE, 1867 }, 1868 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 1869 }, 1870 { 1871 .skcipher = { 1872 .base = { 1873 .cra_name = "cbc(des)", 1874 .cra_driver_name = "cbc-des-caam", 1875 .cra_blocksize = DES_BLOCK_SIZE, 1876 }, 1877 .setkey = des_skcipher_setkey, 1878 .encrypt = skcipher_encrypt, 1879 .decrypt = skcipher_decrypt, 1880 .min_keysize = DES_KEY_SIZE, 1881 .max_keysize = DES_KEY_SIZE, 1882 .ivsize = DES_BLOCK_SIZE, 1883 }, 1884 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 1885 }, 1886 { 1887 .skcipher = { 1888 .base = { 1889 .cra_name = "ctr(aes)", 1890 .cra_driver_name = "ctr-aes-caam", 1891 .cra_blocksize = 1, 1892 }, 1893 .setkey = skcipher_setkey, 1894 .encrypt = skcipher_encrypt, 1895 .decrypt = skcipher_decrypt, 1896 .min_keysize = AES_MIN_KEY_SIZE, 1897 .max_keysize = AES_MAX_KEY_SIZE, 1898 .ivsize = AES_BLOCK_SIZE, 1899 .chunksize = AES_BLOCK_SIZE, 1900 }, 1901 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | 1902 OP_ALG_AAI_CTR_MOD128, 1903 }, 1904 { 1905 .skcipher = { 1906 .base = { 1907 .cra_name = "rfc3686(ctr(aes))", 1908 .cra_driver_name = "rfc3686-ctr-aes-caam", 1909 .cra_blocksize = 1, 1910 }, 1911 .setkey = skcipher_setkey, 1912 .encrypt = skcipher_encrypt, 1913 .decrypt = skcipher_decrypt, 1914 .min_keysize = AES_MIN_KEY_SIZE + 1915 CTR_RFC3686_NONCE_SIZE, 1916 .max_keysize = AES_MAX_KEY_SIZE + 1917 CTR_RFC3686_NONCE_SIZE, 1918 .ivsize = CTR_RFC3686_IV_SIZE, 1919 .chunksize = AES_BLOCK_SIZE, 1920 }, 1921 .caam = { 1922 .class1_alg_type = OP_ALG_ALGSEL_AES | 1923 OP_ALG_AAI_CTR_MOD128, 1924 .rfc3686 = true, 1925 }, 1926 }, 1927 { 1928 .skcipher = { 1929 .base = { 1930 .cra_name = "xts(aes)", 1931 .cra_driver_name = "xts-aes-caam", 1932 .cra_blocksize = AES_BLOCK_SIZE, 1933 }, 1934 .setkey = xts_skcipher_setkey, 1935 .encrypt = skcipher_encrypt, 1936 .decrypt = skcipher_decrypt, 1937 .min_keysize = 2 * AES_MIN_KEY_SIZE, 1938 .max_keysize = 2 * AES_MAX_KEY_SIZE, 1939 .ivsize = AES_BLOCK_SIZE, 1940 }, 1941 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS, 1942 }, 1943 { 1944 .skcipher = { 1945 .base = { 1946 .cra_name = "ecb(des)", 1947 .cra_driver_name = "ecb-des-caam", 1948 .cra_blocksize = DES_BLOCK_SIZE, 1949 }, 1950 .setkey = des_skcipher_setkey, 1951 .encrypt = skcipher_encrypt, 1952 .decrypt = skcipher_decrypt, 1953 .min_keysize = DES_KEY_SIZE, 1954 .max_keysize = DES_KEY_SIZE, 1955 }, 1956 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB, 1957 }, 1958 { 1959 .skcipher = { 1960 .base = { 1961 .cra_name = "ecb(aes)", 1962 .cra_driver_name = "ecb-aes-caam", 1963 .cra_blocksize = AES_BLOCK_SIZE, 1964 }, 1965 .setkey = skcipher_setkey, 1966 .encrypt = skcipher_encrypt, 1967 .decrypt = skcipher_decrypt, 1968 .min_keysize = AES_MIN_KEY_SIZE, 1969 .max_keysize = AES_MAX_KEY_SIZE, 1970 }, 1971 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB, 1972 }, 1973 { 1974 .skcipher = { 1975 .base = { 1976 .cra_name = "ecb(des3_ede)", 1977 .cra_driver_name = "ecb-des3-caam", 1978 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 1979 }, 1980 .setkey = des_skcipher_setkey, 1981 .encrypt = skcipher_encrypt, 1982 .decrypt = skcipher_decrypt, 1983 .min_keysize = DES3_EDE_KEY_SIZE, 1984 .max_keysize = DES3_EDE_KEY_SIZE, 1985 }, 1986 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB, 1987 }, 1988 { 1989 .skcipher = { 1990 .base = { 1991 .cra_name = "ecb(arc4)", 1992 .cra_driver_name = "ecb-arc4-caam", 1993 .cra_blocksize = ARC4_BLOCK_SIZE, 1994 }, 1995 .setkey = skcipher_setkey, 1996 .encrypt = skcipher_encrypt, 1997 .decrypt = skcipher_decrypt, 1998 .min_keysize = ARC4_MIN_KEY_SIZE, 1999 .max_keysize = ARC4_MAX_KEY_SIZE, 2000 }, 2001 .caam.class1_alg_type = OP_ALG_ALGSEL_ARC4 | OP_ALG_AAI_ECB, 2002 }, 2003 }; 2004 2005 static struct caam_aead_alg driver_aeads[] = { 2006 { 2007 .aead = { 2008 .base = { 2009 .cra_name = "rfc4106(gcm(aes))", 2010 .cra_driver_name = "rfc4106-gcm-aes-caam", 2011 .cra_blocksize = 1, 2012 }, 2013 .setkey = rfc4106_setkey, 2014 .setauthsize = rfc4106_setauthsize, 2015 .encrypt = ipsec_gcm_encrypt, 2016 .decrypt = ipsec_gcm_decrypt, 2017 .ivsize = GCM_RFC4106_IV_SIZE, 2018 .maxauthsize = AES_BLOCK_SIZE, 2019 }, 2020 .caam = { 2021 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2022 }, 2023 }, 2024 { 2025 .aead = { 2026 .base = { 2027 .cra_name = "rfc4543(gcm(aes))", 2028 .cra_driver_name = "rfc4543-gcm-aes-caam", 2029 .cra_blocksize = 1, 2030 }, 2031 .setkey = rfc4543_setkey, 2032 .setauthsize = rfc4543_setauthsize, 2033 .encrypt = ipsec_gcm_encrypt, 2034 .decrypt = ipsec_gcm_decrypt, 2035 .ivsize = GCM_RFC4543_IV_SIZE, 2036 .maxauthsize = AES_BLOCK_SIZE, 2037 }, 2038 .caam = { 2039 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2040 }, 2041 }, 2042 /* Galois Counter Mode */ 2043 { 2044 .aead = { 2045 .base = { 2046 .cra_name = "gcm(aes)", 2047 .cra_driver_name = "gcm-aes-caam", 2048 .cra_blocksize = 1, 2049 }, 2050 .setkey = gcm_setkey, 2051 .setauthsize = gcm_setauthsize, 2052 .encrypt = gcm_encrypt, 2053 .decrypt = gcm_decrypt, 2054 .ivsize = GCM_AES_IV_SIZE, 2055 .maxauthsize = AES_BLOCK_SIZE, 2056 }, 2057 .caam = { 2058 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2059 }, 2060 }, 2061 /* single-pass ipsec_esp descriptor */ 2062 { 2063 .aead = { 2064 .base = { 2065 .cra_name = "authenc(hmac(md5)," 2066 "ecb(cipher_null))", 2067 .cra_driver_name = "authenc-hmac-md5-" 2068 "ecb-cipher_null-caam", 2069 .cra_blocksize = NULL_BLOCK_SIZE, 2070 }, 2071 .setkey = aead_setkey, 2072 .setauthsize = aead_setauthsize, 2073 .encrypt = aead_encrypt, 2074 .decrypt = aead_decrypt, 2075 .ivsize = NULL_IV_SIZE, 2076 .maxauthsize = MD5_DIGEST_SIZE, 2077 }, 2078 .caam = { 2079 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2080 OP_ALG_AAI_HMAC_PRECOMP, 2081 }, 2082 }, 2083 { 2084 .aead = { 2085 .base = { 2086 .cra_name = "authenc(hmac(sha1)," 2087 "ecb(cipher_null))", 2088 .cra_driver_name = "authenc-hmac-sha1-" 2089 "ecb-cipher_null-caam", 2090 .cra_blocksize = NULL_BLOCK_SIZE, 2091 }, 2092 .setkey = aead_setkey, 2093 .setauthsize = aead_setauthsize, 2094 .encrypt = aead_encrypt, 2095 .decrypt = aead_decrypt, 2096 .ivsize = NULL_IV_SIZE, 2097 .maxauthsize = SHA1_DIGEST_SIZE, 2098 }, 2099 .caam = { 2100 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2101 OP_ALG_AAI_HMAC_PRECOMP, 2102 }, 2103 }, 2104 { 2105 .aead = { 2106 .base = { 2107 .cra_name = "authenc(hmac(sha224)," 2108 "ecb(cipher_null))", 2109 .cra_driver_name = "authenc-hmac-sha224-" 2110 "ecb-cipher_null-caam", 2111 .cra_blocksize = NULL_BLOCK_SIZE, 2112 }, 2113 .setkey = aead_setkey, 2114 .setauthsize = aead_setauthsize, 2115 .encrypt = aead_encrypt, 2116 .decrypt = aead_decrypt, 2117 .ivsize = NULL_IV_SIZE, 2118 .maxauthsize = SHA224_DIGEST_SIZE, 2119 }, 2120 .caam = { 2121 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2122 OP_ALG_AAI_HMAC_PRECOMP, 2123 }, 2124 }, 2125 { 2126 .aead = { 2127 .base = { 2128 .cra_name = "authenc(hmac(sha256)," 2129 "ecb(cipher_null))", 2130 .cra_driver_name = "authenc-hmac-sha256-" 2131 "ecb-cipher_null-caam", 2132 .cra_blocksize = NULL_BLOCK_SIZE, 2133 }, 2134 .setkey = aead_setkey, 2135 .setauthsize = aead_setauthsize, 2136 .encrypt = aead_encrypt, 2137 .decrypt = aead_decrypt, 2138 .ivsize = NULL_IV_SIZE, 2139 .maxauthsize = SHA256_DIGEST_SIZE, 2140 }, 2141 .caam = { 2142 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2143 OP_ALG_AAI_HMAC_PRECOMP, 2144 }, 2145 }, 2146 { 2147 .aead = { 2148 .base = { 2149 .cra_name = "authenc(hmac(sha384)," 2150 "ecb(cipher_null))", 2151 .cra_driver_name = "authenc-hmac-sha384-" 2152 "ecb-cipher_null-caam", 2153 .cra_blocksize = NULL_BLOCK_SIZE, 2154 }, 2155 .setkey = aead_setkey, 2156 .setauthsize = aead_setauthsize, 2157 .encrypt = aead_encrypt, 2158 .decrypt = aead_decrypt, 2159 .ivsize = NULL_IV_SIZE, 2160 .maxauthsize = SHA384_DIGEST_SIZE, 2161 }, 2162 .caam = { 2163 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2164 OP_ALG_AAI_HMAC_PRECOMP, 2165 }, 2166 }, 2167 { 2168 .aead = { 2169 .base = { 2170 .cra_name = "authenc(hmac(sha512)," 2171 "ecb(cipher_null))", 2172 .cra_driver_name = "authenc-hmac-sha512-" 2173 "ecb-cipher_null-caam", 2174 .cra_blocksize = NULL_BLOCK_SIZE, 2175 }, 2176 .setkey = aead_setkey, 2177 .setauthsize = aead_setauthsize, 2178 .encrypt = aead_encrypt, 2179 .decrypt = aead_decrypt, 2180 .ivsize = NULL_IV_SIZE, 2181 .maxauthsize = SHA512_DIGEST_SIZE, 2182 }, 2183 .caam = { 2184 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2185 OP_ALG_AAI_HMAC_PRECOMP, 2186 }, 2187 }, 2188 { 2189 .aead = { 2190 .base = { 2191 .cra_name = "authenc(hmac(md5),cbc(aes))", 2192 .cra_driver_name = "authenc-hmac-md5-" 2193 "cbc-aes-caam", 2194 .cra_blocksize = AES_BLOCK_SIZE, 2195 }, 2196 .setkey = aead_setkey, 2197 .setauthsize = aead_setauthsize, 2198 .encrypt = aead_encrypt, 2199 .decrypt = aead_decrypt, 2200 .ivsize = AES_BLOCK_SIZE, 2201 .maxauthsize = MD5_DIGEST_SIZE, 2202 }, 2203 .caam = { 2204 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2205 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2206 OP_ALG_AAI_HMAC_PRECOMP, 2207 }, 2208 }, 2209 { 2210 .aead = { 2211 .base = { 2212 .cra_name = "echainiv(authenc(hmac(md5)," 2213 "cbc(aes)))", 2214 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2215 "cbc-aes-caam", 2216 .cra_blocksize = AES_BLOCK_SIZE, 2217 }, 2218 .setkey = aead_setkey, 2219 .setauthsize = aead_setauthsize, 2220 .encrypt = aead_encrypt, 2221 .decrypt = aead_decrypt, 2222 .ivsize = AES_BLOCK_SIZE, 2223 .maxauthsize = MD5_DIGEST_SIZE, 2224 }, 2225 .caam = { 2226 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2227 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2228 OP_ALG_AAI_HMAC_PRECOMP, 2229 .geniv = true, 2230 }, 2231 }, 2232 { 2233 .aead = { 2234 .base = { 2235 .cra_name = "authenc(hmac(sha1),cbc(aes))", 2236 .cra_driver_name = "authenc-hmac-sha1-" 2237 "cbc-aes-caam", 2238 .cra_blocksize = AES_BLOCK_SIZE, 2239 }, 2240 .setkey = aead_setkey, 2241 .setauthsize = aead_setauthsize, 2242 .encrypt = aead_encrypt, 2243 .decrypt = aead_decrypt, 2244 .ivsize = AES_BLOCK_SIZE, 2245 .maxauthsize = SHA1_DIGEST_SIZE, 2246 }, 2247 .caam = { 2248 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2249 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2250 OP_ALG_AAI_HMAC_PRECOMP, 2251 }, 2252 }, 2253 { 2254 .aead = { 2255 .base = { 2256 .cra_name = "echainiv(authenc(hmac(sha1)," 2257 "cbc(aes)))", 2258 .cra_driver_name = "echainiv-authenc-" 2259 "hmac-sha1-cbc-aes-caam", 2260 .cra_blocksize = AES_BLOCK_SIZE, 2261 }, 2262 .setkey = aead_setkey, 2263 .setauthsize = aead_setauthsize, 2264 .encrypt = aead_encrypt, 2265 .decrypt = aead_decrypt, 2266 .ivsize = AES_BLOCK_SIZE, 2267 .maxauthsize = SHA1_DIGEST_SIZE, 2268 }, 2269 .caam = { 2270 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2271 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2272 OP_ALG_AAI_HMAC_PRECOMP, 2273 .geniv = true, 2274 }, 2275 }, 2276 { 2277 .aead = { 2278 .base = { 2279 .cra_name = "authenc(hmac(sha224),cbc(aes))", 2280 .cra_driver_name = "authenc-hmac-sha224-" 2281 "cbc-aes-caam", 2282 .cra_blocksize = AES_BLOCK_SIZE, 2283 }, 2284 .setkey = aead_setkey, 2285 .setauthsize = aead_setauthsize, 2286 .encrypt = aead_encrypt, 2287 .decrypt = aead_decrypt, 2288 .ivsize = AES_BLOCK_SIZE, 2289 .maxauthsize = SHA224_DIGEST_SIZE, 2290 }, 2291 .caam = { 2292 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2293 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2294 OP_ALG_AAI_HMAC_PRECOMP, 2295 }, 2296 }, 2297 { 2298 .aead = { 2299 .base = { 2300 .cra_name = "echainiv(authenc(hmac(sha224)," 2301 "cbc(aes)))", 2302 .cra_driver_name = "echainiv-authenc-" 2303 "hmac-sha224-cbc-aes-caam", 2304 .cra_blocksize = AES_BLOCK_SIZE, 2305 }, 2306 .setkey = aead_setkey, 2307 .setauthsize = aead_setauthsize, 2308 .encrypt = aead_encrypt, 2309 .decrypt = aead_decrypt, 2310 .ivsize = AES_BLOCK_SIZE, 2311 .maxauthsize = SHA224_DIGEST_SIZE, 2312 }, 2313 .caam = { 2314 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2315 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2316 OP_ALG_AAI_HMAC_PRECOMP, 2317 .geniv = true, 2318 }, 2319 }, 2320 { 2321 .aead = { 2322 .base = { 2323 .cra_name = "authenc(hmac(sha256),cbc(aes))", 2324 .cra_driver_name = "authenc-hmac-sha256-" 2325 "cbc-aes-caam", 2326 .cra_blocksize = AES_BLOCK_SIZE, 2327 }, 2328 .setkey = aead_setkey, 2329 .setauthsize = aead_setauthsize, 2330 .encrypt = aead_encrypt, 2331 .decrypt = aead_decrypt, 2332 .ivsize = AES_BLOCK_SIZE, 2333 .maxauthsize = SHA256_DIGEST_SIZE, 2334 }, 2335 .caam = { 2336 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2337 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2338 OP_ALG_AAI_HMAC_PRECOMP, 2339 }, 2340 }, 2341 { 2342 .aead = { 2343 .base = { 2344 .cra_name = "echainiv(authenc(hmac(sha256)," 2345 "cbc(aes)))", 2346 .cra_driver_name = "echainiv-authenc-" 2347 "hmac-sha256-cbc-aes-caam", 2348 .cra_blocksize = AES_BLOCK_SIZE, 2349 }, 2350 .setkey = aead_setkey, 2351 .setauthsize = aead_setauthsize, 2352 .encrypt = aead_encrypt, 2353 .decrypt = aead_decrypt, 2354 .ivsize = AES_BLOCK_SIZE, 2355 .maxauthsize = SHA256_DIGEST_SIZE, 2356 }, 2357 .caam = { 2358 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2359 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2360 OP_ALG_AAI_HMAC_PRECOMP, 2361 .geniv = true, 2362 }, 2363 }, 2364 { 2365 .aead = { 2366 .base = { 2367 .cra_name = "authenc(hmac(sha384),cbc(aes))", 2368 .cra_driver_name = "authenc-hmac-sha384-" 2369 "cbc-aes-caam", 2370 .cra_blocksize = AES_BLOCK_SIZE, 2371 }, 2372 .setkey = aead_setkey, 2373 .setauthsize = aead_setauthsize, 2374 .encrypt = aead_encrypt, 2375 .decrypt = aead_decrypt, 2376 .ivsize = AES_BLOCK_SIZE, 2377 .maxauthsize = SHA384_DIGEST_SIZE, 2378 }, 2379 .caam = { 2380 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2381 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2382 OP_ALG_AAI_HMAC_PRECOMP, 2383 }, 2384 }, 2385 { 2386 .aead = { 2387 .base = { 2388 .cra_name = "echainiv(authenc(hmac(sha384)," 2389 "cbc(aes)))", 2390 .cra_driver_name = "echainiv-authenc-" 2391 "hmac-sha384-cbc-aes-caam", 2392 .cra_blocksize = AES_BLOCK_SIZE, 2393 }, 2394 .setkey = aead_setkey, 2395 .setauthsize = aead_setauthsize, 2396 .encrypt = aead_encrypt, 2397 .decrypt = aead_decrypt, 2398 .ivsize = AES_BLOCK_SIZE, 2399 .maxauthsize = SHA384_DIGEST_SIZE, 2400 }, 2401 .caam = { 2402 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2403 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2404 OP_ALG_AAI_HMAC_PRECOMP, 2405 .geniv = true, 2406 }, 2407 }, 2408 { 2409 .aead = { 2410 .base = { 2411 .cra_name = "authenc(hmac(sha512),cbc(aes))", 2412 .cra_driver_name = "authenc-hmac-sha512-" 2413 "cbc-aes-caam", 2414 .cra_blocksize = AES_BLOCK_SIZE, 2415 }, 2416 .setkey = aead_setkey, 2417 .setauthsize = aead_setauthsize, 2418 .encrypt = aead_encrypt, 2419 .decrypt = aead_decrypt, 2420 .ivsize = AES_BLOCK_SIZE, 2421 .maxauthsize = SHA512_DIGEST_SIZE, 2422 }, 2423 .caam = { 2424 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2425 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2426 OP_ALG_AAI_HMAC_PRECOMP, 2427 }, 2428 }, 2429 { 2430 .aead = { 2431 .base = { 2432 .cra_name = "echainiv(authenc(hmac(sha512)," 2433 "cbc(aes)))", 2434 .cra_driver_name = "echainiv-authenc-" 2435 "hmac-sha512-cbc-aes-caam", 2436 .cra_blocksize = AES_BLOCK_SIZE, 2437 }, 2438 .setkey = aead_setkey, 2439 .setauthsize = aead_setauthsize, 2440 .encrypt = aead_encrypt, 2441 .decrypt = aead_decrypt, 2442 .ivsize = AES_BLOCK_SIZE, 2443 .maxauthsize = SHA512_DIGEST_SIZE, 2444 }, 2445 .caam = { 2446 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2447 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2448 OP_ALG_AAI_HMAC_PRECOMP, 2449 .geniv = true, 2450 }, 2451 }, 2452 { 2453 .aead = { 2454 .base = { 2455 .cra_name = "authenc(hmac(md5),cbc(des3_ede))", 2456 .cra_driver_name = "authenc-hmac-md5-" 2457 "cbc-des3_ede-caam", 2458 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2459 }, 2460 .setkey = aead_setkey, 2461 .setauthsize = aead_setauthsize, 2462 .encrypt = aead_encrypt, 2463 .decrypt = aead_decrypt, 2464 .ivsize = DES3_EDE_BLOCK_SIZE, 2465 .maxauthsize = MD5_DIGEST_SIZE, 2466 }, 2467 .caam = { 2468 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2469 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2470 OP_ALG_AAI_HMAC_PRECOMP, 2471 } 2472 }, 2473 { 2474 .aead = { 2475 .base = { 2476 .cra_name = "echainiv(authenc(hmac(md5)," 2477 "cbc(des3_ede)))", 2478 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2479 "cbc-des3_ede-caam", 2480 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2481 }, 2482 .setkey = aead_setkey, 2483 .setauthsize = aead_setauthsize, 2484 .encrypt = aead_encrypt, 2485 .decrypt = aead_decrypt, 2486 .ivsize = DES3_EDE_BLOCK_SIZE, 2487 .maxauthsize = MD5_DIGEST_SIZE, 2488 }, 2489 .caam = { 2490 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2491 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2492 OP_ALG_AAI_HMAC_PRECOMP, 2493 .geniv = true, 2494 } 2495 }, 2496 { 2497 .aead = { 2498 .base = { 2499 .cra_name = "authenc(hmac(sha1)," 2500 "cbc(des3_ede))", 2501 .cra_driver_name = "authenc-hmac-sha1-" 2502 "cbc-des3_ede-caam", 2503 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2504 }, 2505 .setkey = aead_setkey, 2506 .setauthsize = aead_setauthsize, 2507 .encrypt = aead_encrypt, 2508 .decrypt = aead_decrypt, 2509 .ivsize = DES3_EDE_BLOCK_SIZE, 2510 .maxauthsize = SHA1_DIGEST_SIZE, 2511 }, 2512 .caam = { 2513 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2514 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2515 OP_ALG_AAI_HMAC_PRECOMP, 2516 }, 2517 }, 2518 { 2519 .aead = { 2520 .base = { 2521 .cra_name = "echainiv(authenc(hmac(sha1)," 2522 "cbc(des3_ede)))", 2523 .cra_driver_name = "echainiv-authenc-" 2524 "hmac-sha1-" 2525 "cbc-des3_ede-caam", 2526 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2527 }, 2528 .setkey = aead_setkey, 2529 .setauthsize = aead_setauthsize, 2530 .encrypt = aead_encrypt, 2531 .decrypt = aead_decrypt, 2532 .ivsize = DES3_EDE_BLOCK_SIZE, 2533 .maxauthsize = SHA1_DIGEST_SIZE, 2534 }, 2535 .caam = { 2536 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2537 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2538 OP_ALG_AAI_HMAC_PRECOMP, 2539 .geniv = true, 2540 }, 2541 }, 2542 { 2543 .aead = { 2544 .base = { 2545 .cra_name = "authenc(hmac(sha224)," 2546 "cbc(des3_ede))", 2547 .cra_driver_name = "authenc-hmac-sha224-" 2548 "cbc-des3_ede-caam", 2549 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2550 }, 2551 .setkey = aead_setkey, 2552 .setauthsize = aead_setauthsize, 2553 .encrypt = aead_encrypt, 2554 .decrypt = aead_decrypt, 2555 .ivsize = DES3_EDE_BLOCK_SIZE, 2556 .maxauthsize = SHA224_DIGEST_SIZE, 2557 }, 2558 .caam = { 2559 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2560 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2561 OP_ALG_AAI_HMAC_PRECOMP, 2562 }, 2563 }, 2564 { 2565 .aead = { 2566 .base = { 2567 .cra_name = "echainiv(authenc(hmac(sha224)," 2568 "cbc(des3_ede)))", 2569 .cra_driver_name = "echainiv-authenc-" 2570 "hmac-sha224-" 2571 "cbc-des3_ede-caam", 2572 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2573 }, 2574 .setkey = aead_setkey, 2575 .setauthsize = aead_setauthsize, 2576 .encrypt = aead_encrypt, 2577 .decrypt = aead_decrypt, 2578 .ivsize = DES3_EDE_BLOCK_SIZE, 2579 .maxauthsize = SHA224_DIGEST_SIZE, 2580 }, 2581 .caam = { 2582 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2583 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2584 OP_ALG_AAI_HMAC_PRECOMP, 2585 .geniv = true, 2586 }, 2587 }, 2588 { 2589 .aead = { 2590 .base = { 2591 .cra_name = "authenc(hmac(sha256)," 2592 "cbc(des3_ede))", 2593 .cra_driver_name = "authenc-hmac-sha256-" 2594 "cbc-des3_ede-caam", 2595 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2596 }, 2597 .setkey = aead_setkey, 2598 .setauthsize = aead_setauthsize, 2599 .encrypt = aead_encrypt, 2600 .decrypt = aead_decrypt, 2601 .ivsize = DES3_EDE_BLOCK_SIZE, 2602 .maxauthsize = SHA256_DIGEST_SIZE, 2603 }, 2604 .caam = { 2605 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2606 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2607 OP_ALG_AAI_HMAC_PRECOMP, 2608 }, 2609 }, 2610 { 2611 .aead = { 2612 .base = { 2613 .cra_name = "echainiv(authenc(hmac(sha256)," 2614 "cbc(des3_ede)))", 2615 .cra_driver_name = "echainiv-authenc-" 2616 "hmac-sha256-" 2617 "cbc-des3_ede-caam", 2618 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2619 }, 2620 .setkey = aead_setkey, 2621 .setauthsize = aead_setauthsize, 2622 .encrypt = aead_encrypt, 2623 .decrypt = aead_decrypt, 2624 .ivsize = DES3_EDE_BLOCK_SIZE, 2625 .maxauthsize = SHA256_DIGEST_SIZE, 2626 }, 2627 .caam = { 2628 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2629 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2630 OP_ALG_AAI_HMAC_PRECOMP, 2631 .geniv = true, 2632 }, 2633 }, 2634 { 2635 .aead = { 2636 .base = { 2637 .cra_name = "authenc(hmac(sha384)," 2638 "cbc(des3_ede))", 2639 .cra_driver_name = "authenc-hmac-sha384-" 2640 "cbc-des3_ede-caam", 2641 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2642 }, 2643 .setkey = aead_setkey, 2644 .setauthsize = aead_setauthsize, 2645 .encrypt = aead_encrypt, 2646 .decrypt = aead_decrypt, 2647 .ivsize = DES3_EDE_BLOCK_SIZE, 2648 .maxauthsize = SHA384_DIGEST_SIZE, 2649 }, 2650 .caam = { 2651 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2652 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2653 OP_ALG_AAI_HMAC_PRECOMP, 2654 }, 2655 }, 2656 { 2657 .aead = { 2658 .base = { 2659 .cra_name = "echainiv(authenc(hmac(sha384)," 2660 "cbc(des3_ede)))", 2661 .cra_driver_name = "echainiv-authenc-" 2662 "hmac-sha384-" 2663 "cbc-des3_ede-caam", 2664 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2665 }, 2666 .setkey = aead_setkey, 2667 .setauthsize = aead_setauthsize, 2668 .encrypt = aead_encrypt, 2669 .decrypt = aead_decrypt, 2670 .ivsize = DES3_EDE_BLOCK_SIZE, 2671 .maxauthsize = SHA384_DIGEST_SIZE, 2672 }, 2673 .caam = { 2674 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2675 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2676 OP_ALG_AAI_HMAC_PRECOMP, 2677 .geniv = true, 2678 }, 2679 }, 2680 { 2681 .aead = { 2682 .base = { 2683 .cra_name = "authenc(hmac(sha512)," 2684 "cbc(des3_ede))", 2685 .cra_driver_name = "authenc-hmac-sha512-" 2686 "cbc-des3_ede-caam", 2687 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2688 }, 2689 .setkey = aead_setkey, 2690 .setauthsize = aead_setauthsize, 2691 .encrypt = aead_encrypt, 2692 .decrypt = aead_decrypt, 2693 .ivsize = DES3_EDE_BLOCK_SIZE, 2694 .maxauthsize = SHA512_DIGEST_SIZE, 2695 }, 2696 .caam = { 2697 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2698 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2699 OP_ALG_AAI_HMAC_PRECOMP, 2700 }, 2701 }, 2702 { 2703 .aead = { 2704 .base = { 2705 .cra_name = "echainiv(authenc(hmac(sha512)," 2706 "cbc(des3_ede)))", 2707 .cra_driver_name = "echainiv-authenc-" 2708 "hmac-sha512-" 2709 "cbc-des3_ede-caam", 2710 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2711 }, 2712 .setkey = aead_setkey, 2713 .setauthsize = aead_setauthsize, 2714 .encrypt = aead_encrypt, 2715 .decrypt = aead_decrypt, 2716 .ivsize = DES3_EDE_BLOCK_SIZE, 2717 .maxauthsize = SHA512_DIGEST_SIZE, 2718 }, 2719 .caam = { 2720 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2721 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2722 OP_ALG_AAI_HMAC_PRECOMP, 2723 .geniv = true, 2724 }, 2725 }, 2726 { 2727 .aead = { 2728 .base = { 2729 .cra_name = "authenc(hmac(md5),cbc(des))", 2730 .cra_driver_name = "authenc-hmac-md5-" 2731 "cbc-des-caam", 2732 .cra_blocksize = DES_BLOCK_SIZE, 2733 }, 2734 .setkey = aead_setkey, 2735 .setauthsize = aead_setauthsize, 2736 .encrypt = aead_encrypt, 2737 .decrypt = aead_decrypt, 2738 .ivsize = DES_BLOCK_SIZE, 2739 .maxauthsize = MD5_DIGEST_SIZE, 2740 }, 2741 .caam = { 2742 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2743 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2744 OP_ALG_AAI_HMAC_PRECOMP, 2745 }, 2746 }, 2747 { 2748 .aead = { 2749 .base = { 2750 .cra_name = "echainiv(authenc(hmac(md5)," 2751 "cbc(des)))", 2752 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2753 "cbc-des-caam", 2754 .cra_blocksize = DES_BLOCK_SIZE, 2755 }, 2756 .setkey = aead_setkey, 2757 .setauthsize = aead_setauthsize, 2758 .encrypt = aead_encrypt, 2759 .decrypt = aead_decrypt, 2760 .ivsize = DES_BLOCK_SIZE, 2761 .maxauthsize = MD5_DIGEST_SIZE, 2762 }, 2763 .caam = { 2764 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2765 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2766 OP_ALG_AAI_HMAC_PRECOMP, 2767 .geniv = true, 2768 }, 2769 }, 2770 { 2771 .aead = { 2772 .base = { 2773 .cra_name = "authenc(hmac(sha1),cbc(des))", 2774 .cra_driver_name = "authenc-hmac-sha1-" 2775 "cbc-des-caam", 2776 .cra_blocksize = DES_BLOCK_SIZE, 2777 }, 2778 .setkey = aead_setkey, 2779 .setauthsize = aead_setauthsize, 2780 .encrypt = aead_encrypt, 2781 .decrypt = aead_decrypt, 2782 .ivsize = DES_BLOCK_SIZE, 2783 .maxauthsize = SHA1_DIGEST_SIZE, 2784 }, 2785 .caam = { 2786 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2787 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2788 OP_ALG_AAI_HMAC_PRECOMP, 2789 }, 2790 }, 2791 { 2792 .aead = { 2793 .base = { 2794 .cra_name = "echainiv(authenc(hmac(sha1)," 2795 "cbc(des)))", 2796 .cra_driver_name = "echainiv-authenc-" 2797 "hmac-sha1-cbc-des-caam", 2798 .cra_blocksize = DES_BLOCK_SIZE, 2799 }, 2800 .setkey = aead_setkey, 2801 .setauthsize = aead_setauthsize, 2802 .encrypt = aead_encrypt, 2803 .decrypt = aead_decrypt, 2804 .ivsize = DES_BLOCK_SIZE, 2805 .maxauthsize = SHA1_DIGEST_SIZE, 2806 }, 2807 .caam = { 2808 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2809 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2810 OP_ALG_AAI_HMAC_PRECOMP, 2811 .geniv = true, 2812 }, 2813 }, 2814 { 2815 .aead = { 2816 .base = { 2817 .cra_name = "authenc(hmac(sha224),cbc(des))", 2818 .cra_driver_name = "authenc-hmac-sha224-" 2819 "cbc-des-caam", 2820 .cra_blocksize = DES_BLOCK_SIZE, 2821 }, 2822 .setkey = aead_setkey, 2823 .setauthsize = aead_setauthsize, 2824 .encrypt = aead_encrypt, 2825 .decrypt = aead_decrypt, 2826 .ivsize = DES_BLOCK_SIZE, 2827 .maxauthsize = SHA224_DIGEST_SIZE, 2828 }, 2829 .caam = { 2830 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2831 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2832 OP_ALG_AAI_HMAC_PRECOMP, 2833 }, 2834 }, 2835 { 2836 .aead = { 2837 .base = { 2838 .cra_name = "echainiv(authenc(hmac(sha224)," 2839 "cbc(des)))", 2840 .cra_driver_name = "echainiv-authenc-" 2841 "hmac-sha224-cbc-des-caam", 2842 .cra_blocksize = DES_BLOCK_SIZE, 2843 }, 2844 .setkey = aead_setkey, 2845 .setauthsize = aead_setauthsize, 2846 .encrypt = aead_encrypt, 2847 .decrypt = aead_decrypt, 2848 .ivsize = DES_BLOCK_SIZE, 2849 .maxauthsize = SHA224_DIGEST_SIZE, 2850 }, 2851 .caam = { 2852 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2853 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2854 OP_ALG_AAI_HMAC_PRECOMP, 2855 .geniv = true, 2856 }, 2857 }, 2858 { 2859 .aead = { 2860 .base = { 2861 .cra_name = "authenc(hmac(sha256),cbc(des))", 2862 .cra_driver_name = "authenc-hmac-sha256-" 2863 "cbc-des-caam", 2864 .cra_blocksize = DES_BLOCK_SIZE, 2865 }, 2866 .setkey = aead_setkey, 2867 .setauthsize = aead_setauthsize, 2868 .encrypt = aead_encrypt, 2869 .decrypt = aead_decrypt, 2870 .ivsize = DES_BLOCK_SIZE, 2871 .maxauthsize = SHA256_DIGEST_SIZE, 2872 }, 2873 .caam = { 2874 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2875 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2876 OP_ALG_AAI_HMAC_PRECOMP, 2877 }, 2878 }, 2879 { 2880 .aead = { 2881 .base = { 2882 .cra_name = "echainiv(authenc(hmac(sha256)," 2883 "cbc(des)))", 2884 .cra_driver_name = "echainiv-authenc-" 2885 "hmac-sha256-cbc-des-caam", 2886 .cra_blocksize = DES_BLOCK_SIZE, 2887 }, 2888 .setkey = aead_setkey, 2889 .setauthsize = aead_setauthsize, 2890 .encrypt = aead_encrypt, 2891 .decrypt = aead_decrypt, 2892 .ivsize = DES_BLOCK_SIZE, 2893 .maxauthsize = SHA256_DIGEST_SIZE, 2894 }, 2895 .caam = { 2896 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2897 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2898 OP_ALG_AAI_HMAC_PRECOMP, 2899 .geniv = true, 2900 }, 2901 }, 2902 { 2903 .aead = { 2904 .base = { 2905 .cra_name = "authenc(hmac(sha384),cbc(des))", 2906 .cra_driver_name = "authenc-hmac-sha384-" 2907 "cbc-des-caam", 2908 .cra_blocksize = DES_BLOCK_SIZE, 2909 }, 2910 .setkey = aead_setkey, 2911 .setauthsize = aead_setauthsize, 2912 .encrypt = aead_encrypt, 2913 .decrypt = aead_decrypt, 2914 .ivsize = DES_BLOCK_SIZE, 2915 .maxauthsize = SHA384_DIGEST_SIZE, 2916 }, 2917 .caam = { 2918 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2919 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2920 OP_ALG_AAI_HMAC_PRECOMP, 2921 }, 2922 }, 2923 { 2924 .aead = { 2925 .base = { 2926 .cra_name = "echainiv(authenc(hmac(sha384)," 2927 "cbc(des)))", 2928 .cra_driver_name = "echainiv-authenc-" 2929 "hmac-sha384-cbc-des-caam", 2930 .cra_blocksize = DES_BLOCK_SIZE, 2931 }, 2932 .setkey = aead_setkey, 2933 .setauthsize = aead_setauthsize, 2934 .encrypt = aead_encrypt, 2935 .decrypt = aead_decrypt, 2936 .ivsize = DES_BLOCK_SIZE, 2937 .maxauthsize = SHA384_DIGEST_SIZE, 2938 }, 2939 .caam = { 2940 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2941 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2942 OP_ALG_AAI_HMAC_PRECOMP, 2943 .geniv = true, 2944 }, 2945 }, 2946 { 2947 .aead = { 2948 .base = { 2949 .cra_name = "authenc(hmac(sha512),cbc(des))", 2950 .cra_driver_name = "authenc-hmac-sha512-" 2951 "cbc-des-caam", 2952 .cra_blocksize = DES_BLOCK_SIZE, 2953 }, 2954 .setkey = aead_setkey, 2955 .setauthsize = aead_setauthsize, 2956 .encrypt = aead_encrypt, 2957 .decrypt = aead_decrypt, 2958 .ivsize = DES_BLOCK_SIZE, 2959 .maxauthsize = SHA512_DIGEST_SIZE, 2960 }, 2961 .caam = { 2962 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2963 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2964 OP_ALG_AAI_HMAC_PRECOMP, 2965 }, 2966 }, 2967 { 2968 .aead = { 2969 .base = { 2970 .cra_name = "echainiv(authenc(hmac(sha512)," 2971 "cbc(des)))", 2972 .cra_driver_name = "echainiv-authenc-" 2973 "hmac-sha512-cbc-des-caam", 2974 .cra_blocksize = DES_BLOCK_SIZE, 2975 }, 2976 .setkey = aead_setkey, 2977 .setauthsize = aead_setauthsize, 2978 .encrypt = aead_encrypt, 2979 .decrypt = aead_decrypt, 2980 .ivsize = DES_BLOCK_SIZE, 2981 .maxauthsize = SHA512_DIGEST_SIZE, 2982 }, 2983 .caam = { 2984 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2985 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2986 OP_ALG_AAI_HMAC_PRECOMP, 2987 .geniv = true, 2988 }, 2989 }, 2990 { 2991 .aead = { 2992 .base = { 2993 .cra_name = "authenc(hmac(md5)," 2994 "rfc3686(ctr(aes)))", 2995 .cra_driver_name = "authenc-hmac-md5-" 2996 "rfc3686-ctr-aes-caam", 2997 .cra_blocksize = 1, 2998 }, 2999 .setkey = aead_setkey, 3000 .setauthsize = aead_setauthsize, 3001 .encrypt = aead_encrypt, 3002 .decrypt = aead_decrypt, 3003 .ivsize = CTR_RFC3686_IV_SIZE, 3004 .maxauthsize = MD5_DIGEST_SIZE, 3005 }, 3006 .caam = { 3007 .class1_alg_type = OP_ALG_ALGSEL_AES | 3008 OP_ALG_AAI_CTR_MOD128, 3009 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3010 OP_ALG_AAI_HMAC_PRECOMP, 3011 .rfc3686 = true, 3012 }, 3013 }, 3014 { 3015 .aead = { 3016 .base = { 3017 .cra_name = "seqiv(authenc(" 3018 "hmac(md5),rfc3686(ctr(aes))))", 3019 .cra_driver_name = "seqiv-authenc-hmac-md5-" 3020 "rfc3686-ctr-aes-caam", 3021 .cra_blocksize = 1, 3022 }, 3023 .setkey = aead_setkey, 3024 .setauthsize = aead_setauthsize, 3025 .encrypt = aead_encrypt, 3026 .decrypt = aead_decrypt, 3027 .ivsize = CTR_RFC3686_IV_SIZE, 3028 .maxauthsize = MD5_DIGEST_SIZE, 3029 }, 3030 .caam = { 3031 .class1_alg_type = OP_ALG_ALGSEL_AES | 3032 OP_ALG_AAI_CTR_MOD128, 3033 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3034 OP_ALG_AAI_HMAC_PRECOMP, 3035 .rfc3686 = true, 3036 .geniv = true, 3037 }, 3038 }, 3039 { 3040 .aead = { 3041 .base = { 3042 .cra_name = "authenc(hmac(sha1)," 3043 "rfc3686(ctr(aes)))", 3044 .cra_driver_name = "authenc-hmac-sha1-" 3045 "rfc3686-ctr-aes-caam", 3046 .cra_blocksize = 1, 3047 }, 3048 .setkey = aead_setkey, 3049 .setauthsize = aead_setauthsize, 3050 .encrypt = aead_encrypt, 3051 .decrypt = aead_decrypt, 3052 .ivsize = CTR_RFC3686_IV_SIZE, 3053 .maxauthsize = SHA1_DIGEST_SIZE, 3054 }, 3055 .caam = { 3056 .class1_alg_type = OP_ALG_ALGSEL_AES | 3057 OP_ALG_AAI_CTR_MOD128, 3058 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3059 OP_ALG_AAI_HMAC_PRECOMP, 3060 .rfc3686 = true, 3061 }, 3062 }, 3063 { 3064 .aead = { 3065 .base = { 3066 .cra_name = "seqiv(authenc(" 3067 "hmac(sha1),rfc3686(ctr(aes))))", 3068 .cra_driver_name = "seqiv-authenc-hmac-sha1-" 3069 "rfc3686-ctr-aes-caam", 3070 .cra_blocksize = 1, 3071 }, 3072 .setkey = aead_setkey, 3073 .setauthsize = aead_setauthsize, 3074 .encrypt = aead_encrypt, 3075 .decrypt = aead_decrypt, 3076 .ivsize = CTR_RFC3686_IV_SIZE, 3077 .maxauthsize = SHA1_DIGEST_SIZE, 3078 }, 3079 .caam = { 3080 .class1_alg_type = OP_ALG_ALGSEL_AES | 3081 OP_ALG_AAI_CTR_MOD128, 3082 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3083 OP_ALG_AAI_HMAC_PRECOMP, 3084 .rfc3686 = true, 3085 .geniv = true, 3086 }, 3087 }, 3088 { 3089 .aead = { 3090 .base = { 3091 .cra_name = "authenc(hmac(sha224)," 3092 "rfc3686(ctr(aes)))", 3093 .cra_driver_name = "authenc-hmac-sha224-" 3094 "rfc3686-ctr-aes-caam", 3095 .cra_blocksize = 1, 3096 }, 3097 .setkey = aead_setkey, 3098 .setauthsize = aead_setauthsize, 3099 .encrypt = aead_encrypt, 3100 .decrypt = aead_decrypt, 3101 .ivsize = CTR_RFC3686_IV_SIZE, 3102 .maxauthsize = SHA224_DIGEST_SIZE, 3103 }, 3104 .caam = { 3105 .class1_alg_type = OP_ALG_ALGSEL_AES | 3106 OP_ALG_AAI_CTR_MOD128, 3107 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3108 OP_ALG_AAI_HMAC_PRECOMP, 3109 .rfc3686 = true, 3110 }, 3111 }, 3112 { 3113 .aead = { 3114 .base = { 3115 .cra_name = "seqiv(authenc(" 3116 "hmac(sha224),rfc3686(ctr(aes))))", 3117 .cra_driver_name = "seqiv-authenc-hmac-sha224-" 3118 "rfc3686-ctr-aes-caam", 3119 .cra_blocksize = 1, 3120 }, 3121 .setkey = aead_setkey, 3122 .setauthsize = aead_setauthsize, 3123 .encrypt = aead_encrypt, 3124 .decrypt = aead_decrypt, 3125 .ivsize = CTR_RFC3686_IV_SIZE, 3126 .maxauthsize = SHA224_DIGEST_SIZE, 3127 }, 3128 .caam = { 3129 .class1_alg_type = OP_ALG_ALGSEL_AES | 3130 OP_ALG_AAI_CTR_MOD128, 3131 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3132 OP_ALG_AAI_HMAC_PRECOMP, 3133 .rfc3686 = true, 3134 .geniv = true, 3135 }, 3136 }, 3137 { 3138 .aead = { 3139 .base = { 3140 .cra_name = "authenc(hmac(sha256)," 3141 "rfc3686(ctr(aes)))", 3142 .cra_driver_name = "authenc-hmac-sha256-" 3143 "rfc3686-ctr-aes-caam", 3144 .cra_blocksize = 1, 3145 }, 3146 .setkey = aead_setkey, 3147 .setauthsize = aead_setauthsize, 3148 .encrypt = aead_encrypt, 3149 .decrypt = aead_decrypt, 3150 .ivsize = CTR_RFC3686_IV_SIZE, 3151 .maxauthsize = SHA256_DIGEST_SIZE, 3152 }, 3153 .caam = { 3154 .class1_alg_type = OP_ALG_ALGSEL_AES | 3155 OP_ALG_AAI_CTR_MOD128, 3156 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3157 OP_ALG_AAI_HMAC_PRECOMP, 3158 .rfc3686 = true, 3159 }, 3160 }, 3161 { 3162 .aead = { 3163 .base = { 3164 .cra_name = "seqiv(authenc(hmac(sha256)," 3165 "rfc3686(ctr(aes))))", 3166 .cra_driver_name = "seqiv-authenc-hmac-sha256-" 3167 "rfc3686-ctr-aes-caam", 3168 .cra_blocksize = 1, 3169 }, 3170 .setkey = aead_setkey, 3171 .setauthsize = aead_setauthsize, 3172 .encrypt = aead_encrypt, 3173 .decrypt = aead_decrypt, 3174 .ivsize = CTR_RFC3686_IV_SIZE, 3175 .maxauthsize = SHA256_DIGEST_SIZE, 3176 }, 3177 .caam = { 3178 .class1_alg_type = OP_ALG_ALGSEL_AES | 3179 OP_ALG_AAI_CTR_MOD128, 3180 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3181 OP_ALG_AAI_HMAC_PRECOMP, 3182 .rfc3686 = true, 3183 .geniv = true, 3184 }, 3185 }, 3186 { 3187 .aead = { 3188 .base = { 3189 .cra_name = "authenc(hmac(sha384)," 3190 "rfc3686(ctr(aes)))", 3191 .cra_driver_name = "authenc-hmac-sha384-" 3192 "rfc3686-ctr-aes-caam", 3193 .cra_blocksize = 1, 3194 }, 3195 .setkey = aead_setkey, 3196 .setauthsize = aead_setauthsize, 3197 .encrypt = aead_encrypt, 3198 .decrypt = aead_decrypt, 3199 .ivsize = CTR_RFC3686_IV_SIZE, 3200 .maxauthsize = SHA384_DIGEST_SIZE, 3201 }, 3202 .caam = { 3203 .class1_alg_type = OP_ALG_ALGSEL_AES | 3204 OP_ALG_AAI_CTR_MOD128, 3205 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3206 OP_ALG_AAI_HMAC_PRECOMP, 3207 .rfc3686 = true, 3208 }, 3209 }, 3210 { 3211 .aead = { 3212 .base = { 3213 .cra_name = "seqiv(authenc(hmac(sha384)," 3214 "rfc3686(ctr(aes))))", 3215 .cra_driver_name = "seqiv-authenc-hmac-sha384-" 3216 "rfc3686-ctr-aes-caam", 3217 .cra_blocksize = 1, 3218 }, 3219 .setkey = aead_setkey, 3220 .setauthsize = aead_setauthsize, 3221 .encrypt = aead_encrypt, 3222 .decrypt = aead_decrypt, 3223 .ivsize = CTR_RFC3686_IV_SIZE, 3224 .maxauthsize = SHA384_DIGEST_SIZE, 3225 }, 3226 .caam = { 3227 .class1_alg_type = OP_ALG_ALGSEL_AES | 3228 OP_ALG_AAI_CTR_MOD128, 3229 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3230 OP_ALG_AAI_HMAC_PRECOMP, 3231 .rfc3686 = true, 3232 .geniv = true, 3233 }, 3234 }, 3235 { 3236 .aead = { 3237 .base = { 3238 .cra_name = "authenc(hmac(sha512)," 3239 "rfc3686(ctr(aes)))", 3240 .cra_driver_name = "authenc-hmac-sha512-" 3241 "rfc3686-ctr-aes-caam", 3242 .cra_blocksize = 1, 3243 }, 3244 .setkey = aead_setkey, 3245 .setauthsize = aead_setauthsize, 3246 .encrypt = aead_encrypt, 3247 .decrypt = aead_decrypt, 3248 .ivsize = CTR_RFC3686_IV_SIZE, 3249 .maxauthsize = SHA512_DIGEST_SIZE, 3250 }, 3251 .caam = { 3252 .class1_alg_type = OP_ALG_ALGSEL_AES | 3253 OP_ALG_AAI_CTR_MOD128, 3254 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3255 OP_ALG_AAI_HMAC_PRECOMP, 3256 .rfc3686 = true, 3257 }, 3258 }, 3259 { 3260 .aead = { 3261 .base = { 3262 .cra_name = "seqiv(authenc(hmac(sha512)," 3263 "rfc3686(ctr(aes))))", 3264 .cra_driver_name = "seqiv-authenc-hmac-sha512-" 3265 "rfc3686-ctr-aes-caam", 3266 .cra_blocksize = 1, 3267 }, 3268 .setkey = aead_setkey, 3269 .setauthsize = aead_setauthsize, 3270 .encrypt = aead_encrypt, 3271 .decrypt = aead_decrypt, 3272 .ivsize = CTR_RFC3686_IV_SIZE, 3273 .maxauthsize = SHA512_DIGEST_SIZE, 3274 }, 3275 .caam = { 3276 .class1_alg_type = OP_ALG_ALGSEL_AES | 3277 OP_ALG_AAI_CTR_MOD128, 3278 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3279 OP_ALG_AAI_HMAC_PRECOMP, 3280 .rfc3686 = true, 3281 .geniv = true, 3282 }, 3283 }, 3284 { 3285 .aead = { 3286 .base = { 3287 .cra_name = "rfc7539(chacha20,poly1305)", 3288 .cra_driver_name = "rfc7539-chacha20-poly1305-" 3289 "caam", 3290 .cra_blocksize = 1, 3291 }, 3292 .setkey = chachapoly_setkey, 3293 .setauthsize = chachapoly_setauthsize, 3294 .encrypt = chachapoly_encrypt, 3295 .decrypt = chachapoly_decrypt, 3296 .ivsize = CHACHAPOLY_IV_SIZE, 3297 .maxauthsize = POLY1305_DIGEST_SIZE, 3298 }, 3299 .caam = { 3300 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 | 3301 OP_ALG_AAI_AEAD, 3302 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 | 3303 OP_ALG_AAI_AEAD, 3304 }, 3305 }, 3306 { 3307 .aead = { 3308 .base = { 3309 .cra_name = "rfc7539esp(chacha20,poly1305)", 3310 .cra_driver_name = "rfc7539esp-chacha20-" 3311 "poly1305-caam", 3312 .cra_blocksize = 1, 3313 }, 3314 .setkey = chachapoly_setkey, 3315 .setauthsize = chachapoly_setauthsize, 3316 .encrypt = chachapoly_encrypt, 3317 .decrypt = chachapoly_decrypt, 3318 .ivsize = 8, 3319 .maxauthsize = POLY1305_DIGEST_SIZE, 3320 }, 3321 .caam = { 3322 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 | 3323 OP_ALG_AAI_AEAD, 3324 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 | 3325 OP_ALG_AAI_AEAD, 3326 }, 3327 }, 3328 }; 3329 3330 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam, 3331 bool uses_dkp) 3332 { 3333 dma_addr_t dma_addr; 3334 struct caam_drv_private *priv; 3335 3336 ctx->jrdev = caam_jr_alloc(); 3337 if (IS_ERR(ctx->jrdev)) { 3338 pr_err("Job Ring Device allocation for transform failed\n"); 3339 return PTR_ERR(ctx->jrdev); 3340 } 3341 3342 priv = dev_get_drvdata(ctx->jrdev->parent); 3343 if (priv->era >= 6 && uses_dkp) 3344 ctx->dir = DMA_BIDIRECTIONAL; 3345 else 3346 ctx->dir = DMA_TO_DEVICE; 3347 3348 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, 3349 offsetof(struct caam_ctx, 3350 sh_desc_enc_dma), 3351 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3352 if (dma_mapping_error(ctx->jrdev, dma_addr)) { 3353 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); 3354 caam_jr_free(ctx->jrdev); 3355 return -ENOMEM; 3356 } 3357 3358 ctx->sh_desc_enc_dma = dma_addr; 3359 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx, 3360 sh_desc_dec); 3361 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key); 3362 3363 /* copy descriptor header template value */ 3364 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type; 3365 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type; 3366 3367 return 0; 3368 } 3369 3370 static int caam_cra_init(struct crypto_skcipher *tfm) 3371 { 3372 struct skcipher_alg *alg = crypto_skcipher_alg(tfm); 3373 struct caam_skcipher_alg *caam_alg = 3374 container_of(alg, typeof(*caam_alg), skcipher); 3375 3376 return caam_init_common(crypto_skcipher_ctx(tfm), &caam_alg->caam, 3377 false); 3378 } 3379 3380 static int caam_aead_init(struct crypto_aead *tfm) 3381 { 3382 struct aead_alg *alg = crypto_aead_alg(tfm); 3383 struct caam_aead_alg *caam_alg = 3384 container_of(alg, struct caam_aead_alg, aead); 3385 struct caam_ctx *ctx = crypto_aead_ctx(tfm); 3386 3387 return caam_init_common(ctx, &caam_alg->caam, 3388 alg->setkey == aead_setkey); 3389 } 3390 3391 static void caam_exit_common(struct caam_ctx *ctx) 3392 { 3393 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, 3394 offsetof(struct caam_ctx, sh_desc_enc_dma), 3395 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3396 caam_jr_free(ctx->jrdev); 3397 } 3398 3399 static void caam_cra_exit(struct crypto_skcipher *tfm) 3400 { 3401 caam_exit_common(crypto_skcipher_ctx(tfm)); 3402 } 3403 3404 static void caam_aead_exit(struct crypto_aead *tfm) 3405 { 3406 caam_exit_common(crypto_aead_ctx(tfm)); 3407 } 3408 3409 static void __exit caam_algapi_exit(void) 3410 { 3411 int i; 3412 3413 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3414 struct caam_aead_alg *t_alg = driver_aeads + i; 3415 3416 if (t_alg->registered) 3417 crypto_unregister_aead(&t_alg->aead); 3418 } 3419 3420 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3421 struct caam_skcipher_alg *t_alg = driver_algs + i; 3422 3423 if (t_alg->registered) 3424 crypto_unregister_skcipher(&t_alg->skcipher); 3425 } 3426 } 3427 3428 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg) 3429 { 3430 struct skcipher_alg *alg = &t_alg->skcipher; 3431 3432 alg->base.cra_module = THIS_MODULE; 3433 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3434 alg->base.cra_ctxsize = sizeof(struct caam_ctx); 3435 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 3436 3437 alg->init = caam_cra_init; 3438 alg->exit = caam_cra_exit; 3439 } 3440 3441 static void caam_aead_alg_init(struct caam_aead_alg *t_alg) 3442 { 3443 struct aead_alg *alg = &t_alg->aead; 3444 3445 alg->base.cra_module = THIS_MODULE; 3446 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3447 alg->base.cra_ctxsize = sizeof(struct caam_ctx); 3448 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 3449 3450 alg->init = caam_aead_init; 3451 alg->exit = caam_aead_exit; 3452 } 3453 3454 static int __init caam_algapi_init(void) 3455 { 3456 struct device_node *dev_node; 3457 struct platform_device *pdev; 3458 struct caam_drv_private *priv; 3459 int i = 0, err = 0; 3460 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst; 3461 u32 arc4_inst; 3462 unsigned int md_limit = SHA512_DIGEST_SIZE; 3463 bool registered = false; 3464 3465 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0"); 3466 if (!dev_node) { 3467 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0"); 3468 if (!dev_node) 3469 return -ENODEV; 3470 } 3471 3472 pdev = of_find_device_by_node(dev_node); 3473 if (!pdev) { 3474 of_node_put(dev_node); 3475 return -ENODEV; 3476 } 3477 3478 priv = dev_get_drvdata(&pdev->dev); 3479 of_node_put(dev_node); 3480 3481 /* 3482 * If priv is NULL, it's probably because the caam driver wasn't 3483 * properly initialized (e.g. RNG4 init failed). Thus, bail out here. 3484 */ 3485 if (!priv) { 3486 err = -ENODEV; 3487 goto out_put_dev; 3488 } 3489 3490 3491 /* 3492 * Register crypto algorithms the device supports. 3493 * First, detect presence and attributes of DES, AES, and MD blocks. 3494 */ 3495 if (priv->era < 10) { 3496 u32 cha_vid, cha_inst; 3497 3498 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls); 3499 aes_vid = cha_vid & CHA_ID_LS_AES_MASK; 3500 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3501 3502 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls); 3503 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> 3504 CHA_ID_LS_DES_SHIFT; 3505 aes_inst = cha_inst & CHA_ID_LS_AES_MASK; 3506 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3507 arc4_inst = (cha_inst & CHA_ID_LS_ARC4_MASK) >> 3508 CHA_ID_LS_ARC4_SHIFT; 3509 ccha_inst = 0; 3510 ptha_inst = 0; 3511 } else { 3512 u32 aesa, mdha; 3513 3514 aesa = rd_reg32(&priv->ctrl->vreg.aesa); 3515 mdha = rd_reg32(&priv->ctrl->vreg.mdha); 3516 3517 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT; 3518 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT; 3519 3520 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK; 3521 aes_inst = aesa & CHA_VER_NUM_MASK; 3522 md_inst = mdha & CHA_VER_NUM_MASK; 3523 ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK; 3524 ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK; 3525 arc4_inst = rd_reg32(&priv->ctrl->vreg.afha) & CHA_VER_NUM_MASK; 3526 } 3527 3528 /* If MD is present, limit digest size based on LP256 */ 3529 if (md_inst && md_vid == CHA_VER_VID_MD_LP256) 3530 md_limit = SHA256_DIGEST_SIZE; 3531 3532 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3533 struct caam_skcipher_alg *t_alg = driver_algs + i; 3534 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK; 3535 3536 /* Skip DES algorithms if not supported by device */ 3537 if (!des_inst && 3538 ((alg_sel == OP_ALG_ALGSEL_3DES) || 3539 (alg_sel == OP_ALG_ALGSEL_DES))) 3540 continue; 3541 3542 /* Skip AES algorithms if not supported by device */ 3543 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES)) 3544 continue; 3545 3546 /* Skip ARC4 algorithms if not supported by device */ 3547 if (!arc4_inst && alg_sel == OP_ALG_ALGSEL_ARC4) 3548 continue; 3549 3550 /* 3551 * Check support for AES modes not available 3552 * on LP devices. 3553 */ 3554 if (aes_vid == CHA_VER_VID_AES_LP && 3555 (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) == 3556 OP_ALG_AAI_XTS) 3557 continue; 3558 3559 caam_skcipher_alg_init(t_alg); 3560 3561 err = crypto_register_skcipher(&t_alg->skcipher); 3562 if (err) { 3563 pr_warn("%s alg registration failed\n", 3564 t_alg->skcipher.base.cra_driver_name); 3565 continue; 3566 } 3567 3568 t_alg->registered = true; 3569 registered = true; 3570 } 3571 3572 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3573 struct caam_aead_alg *t_alg = driver_aeads + i; 3574 u32 c1_alg_sel = t_alg->caam.class1_alg_type & 3575 OP_ALG_ALGSEL_MASK; 3576 u32 c2_alg_sel = t_alg->caam.class2_alg_type & 3577 OP_ALG_ALGSEL_MASK; 3578 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; 3579 3580 /* Skip DES algorithms if not supported by device */ 3581 if (!des_inst && 3582 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) || 3583 (c1_alg_sel == OP_ALG_ALGSEL_DES))) 3584 continue; 3585 3586 /* Skip AES algorithms if not supported by device */ 3587 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES)) 3588 continue; 3589 3590 /* Skip CHACHA20 algorithms if not supported by device */ 3591 if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst) 3592 continue; 3593 3594 /* Skip POLY1305 algorithms if not supported by device */ 3595 if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst) 3596 continue; 3597 3598 /* 3599 * Check support for AES algorithms not available 3600 * on LP devices. 3601 */ 3602 if (aes_vid == CHA_VER_VID_AES_LP && alg_aai == OP_ALG_AAI_GCM) 3603 continue; 3604 3605 /* 3606 * Skip algorithms requiring message digests 3607 * if MD or MD size is not supported by device. 3608 */ 3609 if (is_mdha(c2_alg_sel) && 3610 (!md_inst || t_alg->aead.maxauthsize > md_limit)) 3611 continue; 3612 3613 caam_aead_alg_init(t_alg); 3614 3615 err = crypto_register_aead(&t_alg->aead); 3616 if (err) { 3617 pr_warn("%s alg registration failed\n", 3618 t_alg->aead.base.cra_driver_name); 3619 continue; 3620 } 3621 3622 t_alg->registered = true; 3623 registered = true; 3624 } 3625 3626 if (registered) 3627 pr_info("caam algorithms registered in /proc/crypto\n"); 3628 3629 out_put_dev: 3630 put_device(&pdev->dev); 3631 return err; 3632 } 3633 3634 module_init(caam_algapi_init); 3635 module_exit(caam_algapi_exit); 3636 3637 MODULE_LICENSE("GPL"); 3638 MODULE_DESCRIPTION("FSL CAAM support for crypto API"); 3639 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC"); 3640