1 /* 2 * caam - Freescale FSL CAAM support for crypto API 3 * 4 * Copyright 2008-2011 Freescale Semiconductor, Inc. 5 * Copyright 2016 NXP 6 * 7 * Based on talitos crypto API driver. 8 * 9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008): 10 * 11 * --------------- --------------- 12 * | JobDesc #1 |-------------------->| ShareDesc | 13 * | *(packet 1) | | (PDB) | 14 * --------------- |------------->| (hashKey) | 15 * . | | (cipherKey) | 16 * . | |-------->| (operation) | 17 * --------------- | | --------------- 18 * | JobDesc #2 |------| | 19 * | *(packet 2) | | 20 * --------------- | 21 * . | 22 * . | 23 * --------------- | 24 * | JobDesc #3 |------------ 25 * | *(packet 3) | 26 * --------------- 27 * 28 * The SharedDesc never changes for a connection unless rekeyed, but 29 * each packet will likely be in a different place. So all we need 30 * to know to process the packet is where the input is, where the 31 * output goes, and what context we want to process with. Context is 32 * in the SharedDesc, packet references in the JobDesc. 33 * 34 * So, a job desc looks like: 35 * 36 * --------------------- 37 * | Header | 38 * | ShareDesc Pointer | 39 * | SEQ_OUT_PTR | 40 * | (output buffer) | 41 * | (output length) | 42 * | SEQ_IN_PTR | 43 * | (input buffer) | 44 * | (input length) | 45 * --------------------- 46 */ 47 48 #include "compat.h" 49 50 #include "regs.h" 51 #include "intern.h" 52 #include "desc_constr.h" 53 #include "jr.h" 54 #include "error.h" 55 #include "sg_sw_sec4.h" 56 #include "key_gen.h" 57 #include "caamalg_desc.h" 58 59 /* 60 * crypto alg 61 */ 62 #define CAAM_CRA_PRIORITY 3000 63 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */ 64 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \ 65 CTR_RFC3686_NONCE_SIZE + \ 66 SHA512_DIGEST_SIZE * 2) 67 68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2) 69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 70 CAAM_CMD_SZ * 4) 71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \ 72 CAAM_CMD_SZ * 5) 73 74 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN) 75 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ) 76 77 #ifdef DEBUG 78 /* for print_hex_dumps with line references */ 79 #define debug(format, arg...) printk(format, arg) 80 #else 81 #define debug(format, arg...) 82 #endif 83 84 static struct list_head alg_list; 85 86 struct caam_alg_entry { 87 int class1_alg_type; 88 int class2_alg_type; 89 bool rfc3686; 90 bool geniv; 91 }; 92 93 struct caam_aead_alg { 94 struct aead_alg aead; 95 struct caam_alg_entry caam; 96 bool registered; 97 }; 98 99 /* 100 * per-session context 101 */ 102 struct caam_ctx { 103 u32 sh_desc_enc[DESC_MAX_USED_LEN]; 104 u32 sh_desc_dec[DESC_MAX_USED_LEN]; 105 u32 sh_desc_givenc[DESC_MAX_USED_LEN]; 106 u8 key[CAAM_MAX_KEY_SIZE]; 107 dma_addr_t sh_desc_enc_dma; 108 dma_addr_t sh_desc_dec_dma; 109 dma_addr_t sh_desc_givenc_dma; 110 dma_addr_t key_dma; 111 enum dma_data_direction dir; 112 struct device *jrdev; 113 struct alginfo adata; 114 struct alginfo cdata; 115 unsigned int authsize; 116 }; 117 118 static int aead_null_set_sh_desc(struct crypto_aead *aead) 119 { 120 struct caam_ctx *ctx = crypto_aead_ctx(aead); 121 struct device *jrdev = ctx->jrdev; 122 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 123 u32 *desc; 124 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN - 125 ctx->adata.keylen_pad; 126 127 /* 128 * Job Descriptor and Shared Descriptors 129 * must all fit into the 64-word Descriptor h/w Buffer 130 */ 131 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) { 132 ctx->adata.key_inline = true; 133 ctx->adata.key_virt = ctx->key; 134 } else { 135 ctx->adata.key_inline = false; 136 ctx->adata.key_dma = ctx->key_dma; 137 } 138 139 /* aead_encrypt shared descriptor */ 140 desc = ctx->sh_desc_enc; 141 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize, 142 ctrlpriv->era); 143 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 144 desc_bytes(desc), ctx->dir); 145 146 /* 147 * Job Descriptor and Shared Descriptors 148 * must all fit into the 64-word Descriptor h/w Buffer 149 */ 150 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) { 151 ctx->adata.key_inline = true; 152 ctx->adata.key_virt = ctx->key; 153 } else { 154 ctx->adata.key_inline = false; 155 ctx->adata.key_dma = ctx->key_dma; 156 } 157 158 /* aead_decrypt shared descriptor */ 159 desc = ctx->sh_desc_dec; 160 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize, 161 ctrlpriv->era); 162 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 163 desc_bytes(desc), ctx->dir); 164 165 return 0; 166 } 167 168 static int aead_set_sh_desc(struct crypto_aead *aead) 169 { 170 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 171 struct caam_aead_alg, aead); 172 unsigned int ivsize = crypto_aead_ivsize(aead); 173 struct caam_ctx *ctx = crypto_aead_ctx(aead); 174 struct device *jrdev = ctx->jrdev; 175 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 176 u32 ctx1_iv_off = 0; 177 u32 *desc, *nonce = NULL; 178 u32 inl_mask; 179 unsigned int data_len[2]; 180 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 181 OP_ALG_AAI_CTR_MOD128); 182 const bool is_rfc3686 = alg->caam.rfc3686; 183 184 if (!ctx->authsize) 185 return 0; 186 187 /* NULL encryption / decryption */ 188 if (!ctx->cdata.keylen) 189 return aead_null_set_sh_desc(aead); 190 191 /* 192 * AES-CTR needs to load IV in CONTEXT1 reg 193 * at an offset of 128bits (16bytes) 194 * CONTEXT1[255:128] = IV 195 */ 196 if (ctr_mode) 197 ctx1_iv_off = 16; 198 199 /* 200 * RFC3686 specific: 201 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 202 */ 203 if (is_rfc3686) { 204 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 205 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad + 206 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE); 207 } 208 209 data_len[0] = ctx->adata.keylen_pad; 210 data_len[1] = ctx->cdata.keylen; 211 212 if (alg->caam.geniv) 213 goto skip_enc; 214 215 /* 216 * Job Descriptor and Shared Descriptors 217 * must all fit into the 64-word Descriptor h/w Buffer 218 */ 219 if (desc_inline_query(DESC_AEAD_ENC_LEN + 220 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 221 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 222 ARRAY_SIZE(data_len)) < 0) 223 return -EINVAL; 224 225 if (inl_mask & 1) 226 ctx->adata.key_virt = ctx->key; 227 else 228 ctx->adata.key_dma = ctx->key_dma; 229 230 if (inl_mask & 2) 231 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 232 else 233 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 234 235 ctx->adata.key_inline = !!(inl_mask & 1); 236 ctx->cdata.key_inline = !!(inl_mask & 2); 237 238 /* aead_encrypt shared descriptor */ 239 desc = ctx->sh_desc_enc; 240 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize, 241 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off, 242 false, ctrlpriv->era); 243 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 244 desc_bytes(desc), ctx->dir); 245 246 skip_enc: 247 /* 248 * Job Descriptor and Shared Descriptors 249 * must all fit into the 64-word Descriptor h/w Buffer 250 */ 251 if (desc_inline_query(DESC_AEAD_DEC_LEN + 252 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 253 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 254 ARRAY_SIZE(data_len)) < 0) 255 return -EINVAL; 256 257 if (inl_mask & 1) 258 ctx->adata.key_virt = ctx->key; 259 else 260 ctx->adata.key_dma = ctx->key_dma; 261 262 if (inl_mask & 2) 263 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 264 else 265 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 266 267 ctx->adata.key_inline = !!(inl_mask & 1); 268 ctx->cdata.key_inline = !!(inl_mask & 2); 269 270 /* aead_decrypt shared descriptor */ 271 desc = ctx->sh_desc_dec; 272 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize, 273 ctx->authsize, alg->caam.geniv, is_rfc3686, 274 nonce, ctx1_iv_off, false, ctrlpriv->era); 275 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 276 desc_bytes(desc), ctx->dir); 277 278 if (!alg->caam.geniv) 279 goto skip_givenc; 280 281 /* 282 * Job Descriptor and Shared Descriptors 283 * must all fit into the 64-word Descriptor h/w Buffer 284 */ 285 if (desc_inline_query(DESC_AEAD_GIVENC_LEN + 286 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0), 287 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask, 288 ARRAY_SIZE(data_len)) < 0) 289 return -EINVAL; 290 291 if (inl_mask & 1) 292 ctx->adata.key_virt = ctx->key; 293 else 294 ctx->adata.key_dma = ctx->key_dma; 295 296 if (inl_mask & 2) 297 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad; 298 else 299 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad; 300 301 ctx->adata.key_inline = !!(inl_mask & 1); 302 ctx->cdata.key_inline = !!(inl_mask & 2); 303 304 /* aead_givencrypt shared descriptor */ 305 desc = ctx->sh_desc_enc; 306 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize, 307 ctx->authsize, is_rfc3686, nonce, 308 ctx1_iv_off, false, ctrlpriv->era); 309 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 310 desc_bytes(desc), ctx->dir); 311 312 skip_givenc: 313 return 0; 314 } 315 316 static int aead_setauthsize(struct crypto_aead *authenc, 317 unsigned int authsize) 318 { 319 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 320 321 ctx->authsize = authsize; 322 aead_set_sh_desc(authenc); 323 324 return 0; 325 } 326 327 static int gcm_set_sh_desc(struct crypto_aead *aead) 328 { 329 struct caam_ctx *ctx = crypto_aead_ctx(aead); 330 struct device *jrdev = ctx->jrdev; 331 unsigned int ivsize = crypto_aead_ivsize(aead); 332 u32 *desc; 333 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 334 ctx->cdata.keylen; 335 336 if (!ctx->cdata.keylen || !ctx->authsize) 337 return 0; 338 339 /* 340 * AES GCM encrypt shared descriptor 341 * Job Descriptor and Shared Descriptor 342 * must fit into the 64-word Descriptor h/w Buffer 343 */ 344 if (rem_bytes >= DESC_GCM_ENC_LEN) { 345 ctx->cdata.key_inline = true; 346 ctx->cdata.key_virt = ctx->key; 347 } else { 348 ctx->cdata.key_inline = false; 349 ctx->cdata.key_dma = ctx->key_dma; 350 } 351 352 desc = ctx->sh_desc_enc; 353 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false); 354 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 355 desc_bytes(desc), ctx->dir); 356 357 /* 358 * Job Descriptor and Shared Descriptors 359 * must all fit into the 64-word Descriptor h/w Buffer 360 */ 361 if (rem_bytes >= DESC_GCM_DEC_LEN) { 362 ctx->cdata.key_inline = true; 363 ctx->cdata.key_virt = ctx->key; 364 } else { 365 ctx->cdata.key_inline = false; 366 ctx->cdata.key_dma = ctx->key_dma; 367 } 368 369 desc = ctx->sh_desc_dec; 370 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false); 371 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 372 desc_bytes(desc), ctx->dir); 373 374 return 0; 375 } 376 377 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize) 378 { 379 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 380 381 ctx->authsize = authsize; 382 gcm_set_sh_desc(authenc); 383 384 return 0; 385 } 386 387 static int rfc4106_set_sh_desc(struct crypto_aead *aead) 388 { 389 struct caam_ctx *ctx = crypto_aead_ctx(aead); 390 struct device *jrdev = ctx->jrdev; 391 unsigned int ivsize = crypto_aead_ivsize(aead); 392 u32 *desc; 393 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 394 ctx->cdata.keylen; 395 396 if (!ctx->cdata.keylen || !ctx->authsize) 397 return 0; 398 399 /* 400 * RFC4106 encrypt shared descriptor 401 * Job Descriptor and Shared Descriptor 402 * must fit into the 64-word Descriptor h/w Buffer 403 */ 404 if (rem_bytes >= DESC_RFC4106_ENC_LEN) { 405 ctx->cdata.key_inline = true; 406 ctx->cdata.key_virt = ctx->key; 407 } else { 408 ctx->cdata.key_inline = false; 409 ctx->cdata.key_dma = ctx->key_dma; 410 } 411 412 desc = ctx->sh_desc_enc; 413 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize, 414 false); 415 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 416 desc_bytes(desc), ctx->dir); 417 418 /* 419 * Job Descriptor and Shared Descriptors 420 * must all fit into the 64-word Descriptor h/w Buffer 421 */ 422 if (rem_bytes >= DESC_RFC4106_DEC_LEN) { 423 ctx->cdata.key_inline = true; 424 ctx->cdata.key_virt = ctx->key; 425 } else { 426 ctx->cdata.key_inline = false; 427 ctx->cdata.key_dma = ctx->key_dma; 428 } 429 430 desc = ctx->sh_desc_dec; 431 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 432 false); 433 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 434 desc_bytes(desc), ctx->dir); 435 436 return 0; 437 } 438 439 static int rfc4106_setauthsize(struct crypto_aead *authenc, 440 unsigned int authsize) 441 { 442 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 443 444 ctx->authsize = authsize; 445 rfc4106_set_sh_desc(authenc); 446 447 return 0; 448 } 449 450 static int rfc4543_set_sh_desc(struct crypto_aead *aead) 451 { 452 struct caam_ctx *ctx = crypto_aead_ctx(aead); 453 struct device *jrdev = ctx->jrdev; 454 unsigned int ivsize = crypto_aead_ivsize(aead); 455 u32 *desc; 456 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN - 457 ctx->cdata.keylen; 458 459 if (!ctx->cdata.keylen || !ctx->authsize) 460 return 0; 461 462 /* 463 * RFC4543 encrypt shared descriptor 464 * Job Descriptor and Shared Descriptor 465 * must fit into the 64-word Descriptor h/w Buffer 466 */ 467 if (rem_bytes >= DESC_RFC4543_ENC_LEN) { 468 ctx->cdata.key_inline = true; 469 ctx->cdata.key_virt = ctx->key; 470 } else { 471 ctx->cdata.key_inline = false; 472 ctx->cdata.key_dma = ctx->key_dma; 473 } 474 475 desc = ctx->sh_desc_enc; 476 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize, 477 false); 478 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 479 desc_bytes(desc), ctx->dir); 480 481 /* 482 * Job Descriptor and Shared Descriptors 483 * must all fit into the 64-word Descriptor h/w Buffer 484 */ 485 if (rem_bytes >= DESC_RFC4543_DEC_LEN) { 486 ctx->cdata.key_inline = true; 487 ctx->cdata.key_virt = ctx->key; 488 } else { 489 ctx->cdata.key_inline = false; 490 ctx->cdata.key_dma = ctx->key_dma; 491 } 492 493 desc = ctx->sh_desc_dec; 494 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize, 495 false); 496 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 497 desc_bytes(desc), ctx->dir); 498 499 return 0; 500 } 501 502 static int rfc4543_setauthsize(struct crypto_aead *authenc, 503 unsigned int authsize) 504 { 505 struct caam_ctx *ctx = crypto_aead_ctx(authenc); 506 507 ctx->authsize = authsize; 508 rfc4543_set_sh_desc(authenc); 509 510 return 0; 511 } 512 513 static int aead_setkey(struct crypto_aead *aead, 514 const u8 *key, unsigned int keylen) 515 { 516 struct caam_ctx *ctx = crypto_aead_ctx(aead); 517 struct device *jrdev = ctx->jrdev; 518 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); 519 struct crypto_authenc_keys keys; 520 int ret = 0; 521 522 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) 523 goto badkey; 524 525 #ifdef DEBUG 526 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n", 527 keys.authkeylen + keys.enckeylen, keys.enckeylen, 528 keys.authkeylen); 529 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 530 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 531 #endif 532 533 /* 534 * If DKP is supported, use it in the shared descriptor to generate 535 * the split key. 536 */ 537 if (ctrlpriv->era >= 6) { 538 ctx->adata.keylen = keys.authkeylen; 539 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype & 540 OP_ALG_ALGSEL_MASK); 541 542 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE) 543 goto badkey; 544 545 memcpy(ctx->key, keys.authkey, keys.authkeylen); 546 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, 547 keys.enckeylen); 548 dma_sync_single_for_device(jrdev, ctx->key_dma, 549 ctx->adata.keylen_pad + 550 keys.enckeylen, ctx->dir); 551 goto skip_split_key; 552 } 553 554 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, 555 keys.authkeylen, CAAM_MAX_KEY_SIZE - 556 keys.enckeylen); 557 if (ret) { 558 goto badkey; 559 } 560 561 /* postpend encryption key to auth split key */ 562 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen); 563 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + 564 keys.enckeylen, ctx->dir); 565 #ifdef DEBUG 566 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ", 567 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key, 568 ctx->adata.keylen_pad + keys.enckeylen, 1); 569 #endif 570 571 skip_split_key: 572 ctx->cdata.keylen = keys.enckeylen; 573 memzero_explicit(&keys, sizeof(keys)); 574 return aead_set_sh_desc(aead); 575 badkey: 576 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN); 577 memzero_explicit(&keys, sizeof(keys)); 578 return -EINVAL; 579 } 580 581 static int gcm_setkey(struct crypto_aead *aead, 582 const u8 *key, unsigned int keylen) 583 { 584 struct caam_ctx *ctx = crypto_aead_ctx(aead); 585 struct device *jrdev = ctx->jrdev; 586 587 #ifdef DEBUG 588 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 589 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 590 #endif 591 592 memcpy(ctx->key, key, keylen); 593 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir); 594 ctx->cdata.keylen = keylen; 595 596 return gcm_set_sh_desc(aead); 597 } 598 599 static int rfc4106_setkey(struct crypto_aead *aead, 600 const u8 *key, unsigned int keylen) 601 { 602 struct caam_ctx *ctx = crypto_aead_ctx(aead); 603 struct device *jrdev = ctx->jrdev; 604 605 if (keylen < 4) 606 return -EINVAL; 607 608 #ifdef DEBUG 609 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 610 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 611 #endif 612 613 memcpy(ctx->key, key, keylen); 614 615 /* 616 * The last four bytes of the key material are used as the salt value 617 * in the nonce. Update the AES key length. 618 */ 619 ctx->cdata.keylen = keylen - 4; 620 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 621 ctx->dir); 622 return rfc4106_set_sh_desc(aead); 623 } 624 625 static int rfc4543_setkey(struct crypto_aead *aead, 626 const u8 *key, unsigned int keylen) 627 { 628 struct caam_ctx *ctx = crypto_aead_ctx(aead); 629 struct device *jrdev = ctx->jrdev; 630 631 if (keylen < 4) 632 return -EINVAL; 633 634 #ifdef DEBUG 635 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 636 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 637 #endif 638 639 memcpy(ctx->key, key, keylen); 640 641 /* 642 * The last four bytes of the key material are used as the salt value 643 * in the nonce. Update the AES key length. 644 */ 645 ctx->cdata.keylen = keylen - 4; 646 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, 647 ctx->dir); 648 return rfc4543_set_sh_desc(aead); 649 } 650 651 static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher, 652 const u8 *key, unsigned int keylen) 653 { 654 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 655 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher); 656 const char *alg_name = crypto_tfm_alg_name(tfm); 657 struct device *jrdev = ctx->jrdev; 658 unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 659 u32 *desc; 660 u32 ctx1_iv_off = 0; 661 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 662 OP_ALG_AAI_CTR_MOD128); 663 const bool is_rfc3686 = (ctr_mode && 664 (strstr(alg_name, "rfc3686") != NULL)); 665 666 #ifdef DEBUG 667 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ", 668 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1); 669 #endif 670 /* 671 * AES-CTR needs to load IV in CONTEXT1 reg 672 * at an offset of 128bits (16bytes) 673 * CONTEXT1[255:128] = IV 674 */ 675 if (ctr_mode) 676 ctx1_iv_off = 16; 677 678 /* 679 * RFC3686 specific: 680 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER} 681 * | *key = {KEY, NONCE} 682 */ 683 if (is_rfc3686) { 684 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE; 685 keylen -= CTR_RFC3686_NONCE_SIZE; 686 } 687 688 ctx->cdata.keylen = keylen; 689 ctx->cdata.key_virt = key; 690 ctx->cdata.key_inline = true; 691 692 /* ablkcipher_encrypt shared descriptor */ 693 desc = ctx->sh_desc_enc; 694 cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686, 695 ctx1_iv_off); 696 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 697 desc_bytes(desc), ctx->dir); 698 699 /* ablkcipher_decrypt shared descriptor */ 700 desc = ctx->sh_desc_dec; 701 cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686, 702 ctx1_iv_off); 703 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 704 desc_bytes(desc), ctx->dir); 705 706 /* ablkcipher_givencrypt shared descriptor */ 707 desc = ctx->sh_desc_givenc; 708 cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686, 709 ctx1_iv_off); 710 dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma, 711 desc_bytes(desc), ctx->dir); 712 713 return 0; 714 } 715 716 static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher, 717 const u8 *key, unsigned int keylen) 718 { 719 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 720 struct device *jrdev = ctx->jrdev; 721 u32 *desc; 722 723 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) { 724 crypto_ablkcipher_set_flags(ablkcipher, 725 CRYPTO_TFM_RES_BAD_KEY_LEN); 726 dev_err(jrdev, "key size mismatch\n"); 727 return -EINVAL; 728 } 729 730 ctx->cdata.keylen = keylen; 731 ctx->cdata.key_virt = key; 732 ctx->cdata.key_inline = true; 733 734 /* xts_ablkcipher_encrypt shared descriptor */ 735 desc = ctx->sh_desc_enc; 736 cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata); 737 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, 738 desc_bytes(desc), ctx->dir); 739 740 /* xts_ablkcipher_decrypt shared descriptor */ 741 desc = ctx->sh_desc_dec; 742 cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata); 743 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, 744 desc_bytes(desc), ctx->dir); 745 746 return 0; 747 } 748 749 /* 750 * aead_edesc - s/w-extended aead descriptor 751 * @src_nents: number of segments in input s/w scatterlist 752 * @dst_nents: number of segments in output s/w scatterlist 753 * @sec4_sg_bytes: length of dma mapped sec4_sg space 754 * @sec4_sg_dma: bus physical mapped address of h/w link table 755 * @sec4_sg: pointer to h/w link table 756 * @hw_desc: the h/w job descriptor followed by any referenced link tables 757 */ 758 struct aead_edesc { 759 int src_nents; 760 int dst_nents; 761 int sec4_sg_bytes; 762 dma_addr_t sec4_sg_dma; 763 struct sec4_sg_entry *sec4_sg; 764 u32 hw_desc[]; 765 }; 766 767 /* 768 * ablkcipher_edesc - s/w-extended ablkcipher descriptor 769 * @src_nents: number of segments in input s/w scatterlist 770 * @dst_nents: number of segments in output s/w scatterlist 771 * @iv_dma: dma address of iv for checking continuity and link table 772 * @sec4_sg_bytes: length of dma mapped sec4_sg space 773 * @sec4_sg_dma: bus physical mapped address of h/w link table 774 * @sec4_sg: pointer to h/w link table 775 * @hw_desc: the h/w job descriptor followed by any referenced link tables 776 */ 777 struct ablkcipher_edesc { 778 int src_nents; 779 int dst_nents; 780 dma_addr_t iv_dma; 781 int sec4_sg_bytes; 782 dma_addr_t sec4_sg_dma; 783 struct sec4_sg_entry *sec4_sg; 784 u32 hw_desc[0]; 785 }; 786 787 static void caam_unmap(struct device *dev, struct scatterlist *src, 788 struct scatterlist *dst, int src_nents, 789 int dst_nents, 790 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma, 791 int sec4_sg_bytes) 792 { 793 if (dst != src) { 794 if (src_nents) 795 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE); 796 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE); 797 } else { 798 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL); 799 } 800 801 if (iv_dma) 802 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE); 803 if (sec4_sg_bytes) 804 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes, 805 DMA_TO_DEVICE); 806 } 807 808 static void aead_unmap(struct device *dev, 809 struct aead_edesc *edesc, 810 struct aead_request *req) 811 { 812 caam_unmap(dev, req->src, req->dst, 813 edesc->src_nents, edesc->dst_nents, 0, 0, 814 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 815 } 816 817 static void ablkcipher_unmap(struct device *dev, 818 struct ablkcipher_edesc *edesc, 819 struct ablkcipher_request *req) 820 { 821 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 822 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 823 824 caam_unmap(dev, req->src, req->dst, 825 edesc->src_nents, edesc->dst_nents, 826 edesc->iv_dma, ivsize, 827 edesc->sec4_sg_dma, edesc->sec4_sg_bytes); 828 } 829 830 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 831 void *context) 832 { 833 struct aead_request *req = context; 834 struct aead_edesc *edesc; 835 836 #ifdef DEBUG 837 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 838 #endif 839 840 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 841 842 if (err) 843 caam_jr_strstatus(jrdev, err); 844 845 aead_unmap(jrdev, edesc, req); 846 847 kfree(edesc); 848 849 aead_request_complete(req, err); 850 } 851 852 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 853 void *context) 854 { 855 struct aead_request *req = context; 856 struct aead_edesc *edesc; 857 858 #ifdef DEBUG 859 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 860 #endif 861 862 edesc = container_of(desc, struct aead_edesc, hw_desc[0]); 863 864 if (err) 865 caam_jr_strstatus(jrdev, err); 866 867 aead_unmap(jrdev, edesc, req); 868 869 /* 870 * verify hw auth check passed else return -EBADMSG 871 */ 872 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK) 873 err = -EBADMSG; 874 875 kfree(edesc); 876 877 aead_request_complete(req, err); 878 } 879 880 static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err, 881 void *context) 882 { 883 struct ablkcipher_request *req = context; 884 struct ablkcipher_edesc *edesc; 885 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 886 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 887 888 #ifdef DEBUG 889 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 890 #endif 891 892 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]); 893 894 if (err) 895 caam_jr_strstatus(jrdev, err); 896 897 #ifdef DEBUG 898 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ", 899 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 900 edesc->src_nents > 1 ? 100 : ivsize, 1); 901 #endif 902 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ", 903 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 904 edesc->dst_nents > 1 ? 100 : req->nbytes, 1); 905 906 ablkcipher_unmap(jrdev, edesc, req); 907 908 /* 909 * The crypto API expects us to set the IV (req->info) to the last 910 * ciphertext block. This is used e.g. by the CTS mode. 911 */ 912 scatterwalk_map_and_copy(req->info, req->dst, req->nbytes - ivsize, 913 ivsize, 0); 914 915 kfree(edesc); 916 917 ablkcipher_request_complete(req, err); 918 } 919 920 static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err, 921 void *context) 922 { 923 struct ablkcipher_request *req = context; 924 struct ablkcipher_edesc *edesc; 925 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 926 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 927 928 #ifdef DEBUG 929 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); 930 #endif 931 932 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]); 933 if (err) 934 caam_jr_strstatus(jrdev, err); 935 936 #ifdef DEBUG 937 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ", 938 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 939 ivsize, 1); 940 #endif 941 caam_dump_sg(KERN_ERR, "dst @" __stringify(__LINE__)": ", 942 DUMP_PREFIX_ADDRESS, 16, 4, req->dst, 943 edesc->dst_nents > 1 ? 100 : req->nbytes, 1); 944 945 ablkcipher_unmap(jrdev, edesc, req); 946 947 /* 948 * The crypto API expects us to set the IV (req->info) to the last 949 * ciphertext block. 950 */ 951 scatterwalk_map_and_copy(req->info, req->src, req->nbytes - ivsize, 952 ivsize, 0); 953 954 kfree(edesc); 955 956 ablkcipher_request_complete(req, err); 957 } 958 959 /* 960 * Fill in aead job descriptor 961 */ 962 static void init_aead_job(struct aead_request *req, 963 struct aead_edesc *edesc, 964 bool all_contig, bool encrypt) 965 { 966 struct crypto_aead *aead = crypto_aead_reqtfm(req); 967 struct caam_ctx *ctx = crypto_aead_ctx(aead); 968 int authsize = ctx->authsize; 969 u32 *desc = edesc->hw_desc; 970 u32 out_options, in_options; 971 dma_addr_t dst_dma, src_dma; 972 int len, sec4_sg_index = 0; 973 dma_addr_t ptr; 974 u32 *sh_desc; 975 976 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec; 977 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma; 978 979 len = desc_len(sh_desc); 980 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 981 982 if (all_contig) { 983 src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0; 984 in_options = 0; 985 } else { 986 src_dma = edesc->sec4_sg_dma; 987 sec4_sg_index += edesc->src_nents; 988 in_options = LDST_SGF; 989 } 990 991 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, 992 in_options); 993 994 dst_dma = src_dma; 995 out_options = in_options; 996 997 if (unlikely(req->src != req->dst)) { 998 if (edesc->dst_nents == 1) { 999 dst_dma = sg_dma_address(req->dst); 1000 } else { 1001 dst_dma = edesc->sec4_sg_dma + 1002 sec4_sg_index * 1003 sizeof(struct sec4_sg_entry); 1004 out_options = LDST_SGF; 1005 } 1006 } 1007 1008 if (encrypt) 1009 append_seq_out_ptr(desc, dst_dma, 1010 req->assoclen + req->cryptlen + authsize, 1011 out_options); 1012 else 1013 append_seq_out_ptr(desc, dst_dma, 1014 req->assoclen + req->cryptlen - authsize, 1015 out_options); 1016 } 1017 1018 static void init_gcm_job(struct aead_request *req, 1019 struct aead_edesc *edesc, 1020 bool all_contig, bool encrypt) 1021 { 1022 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1023 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1024 unsigned int ivsize = crypto_aead_ivsize(aead); 1025 u32 *desc = edesc->hw_desc; 1026 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE); 1027 unsigned int last; 1028 1029 init_aead_job(req, edesc, all_contig, encrypt); 1030 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1031 1032 /* BUG This should not be specific to generic GCM. */ 1033 last = 0; 1034 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen)) 1035 last = FIFOLD_TYPE_LAST1; 1036 1037 /* Read GCM IV */ 1038 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE | 1039 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last); 1040 /* Append Salt */ 1041 if (!generic_gcm) 1042 append_data(desc, ctx->key + ctx->cdata.keylen, 4); 1043 /* Append IV */ 1044 append_data(desc, req->iv, ivsize); 1045 /* End of blank commands */ 1046 } 1047 1048 static void init_authenc_job(struct aead_request *req, 1049 struct aead_edesc *edesc, 1050 bool all_contig, bool encrypt) 1051 { 1052 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1053 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead), 1054 struct caam_aead_alg, aead); 1055 unsigned int ivsize = crypto_aead_ivsize(aead); 1056 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1057 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent); 1058 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) == 1059 OP_ALG_AAI_CTR_MOD128); 1060 const bool is_rfc3686 = alg->caam.rfc3686; 1061 u32 *desc = edesc->hw_desc; 1062 u32 ivoffset = 0; 1063 1064 /* 1065 * AES-CTR needs to load IV in CONTEXT1 reg 1066 * at an offset of 128bits (16bytes) 1067 * CONTEXT1[255:128] = IV 1068 */ 1069 if (ctr_mode) 1070 ivoffset = 16; 1071 1072 /* 1073 * RFC3686 specific: 1074 * CONTEXT1[255:128] = {NONCE, IV, COUNTER} 1075 */ 1076 if (is_rfc3686) 1077 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE; 1078 1079 init_aead_job(req, edesc, all_contig, encrypt); 1080 1081 /* 1082 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports 1083 * having DPOVRD as destination. 1084 */ 1085 if (ctrlpriv->era < 3) 1086 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen); 1087 else 1088 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen); 1089 1090 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv)) 1091 append_load_as_imm(desc, req->iv, ivsize, 1092 LDST_CLASS_1_CCB | 1093 LDST_SRCDST_BYTE_CONTEXT | 1094 (ivoffset << LDST_OFFSET_SHIFT)); 1095 } 1096 1097 /* 1098 * Fill in ablkcipher job descriptor 1099 */ 1100 static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr, 1101 struct ablkcipher_edesc *edesc, 1102 struct ablkcipher_request *req, 1103 bool iv_contig) 1104 { 1105 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1106 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1107 u32 *desc = edesc->hw_desc; 1108 u32 out_options = 0, in_options; 1109 dma_addr_t dst_dma, src_dma; 1110 int len, sec4_sg_index = 0; 1111 1112 #ifdef DEBUG 1113 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ", 1114 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 1115 ivsize, 1); 1116 pr_err("asked=%d, nbytes%d\n", 1117 (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes); 1118 #endif 1119 caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ", 1120 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1121 edesc->src_nents > 1 ? 100 : req->nbytes, 1); 1122 1123 len = desc_len(sh_desc); 1124 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1125 1126 if (iv_contig) { 1127 src_dma = edesc->iv_dma; 1128 in_options = 0; 1129 } else { 1130 src_dma = edesc->sec4_sg_dma; 1131 sec4_sg_index += edesc->src_nents + 1; 1132 in_options = LDST_SGF; 1133 } 1134 append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options); 1135 1136 if (likely(req->src == req->dst)) { 1137 if (edesc->src_nents == 1 && iv_contig) { 1138 dst_dma = sg_dma_address(req->src); 1139 } else { 1140 dst_dma = edesc->sec4_sg_dma + 1141 sizeof(struct sec4_sg_entry); 1142 out_options = LDST_SGF; 1143 } 1144 } else { 1145 if (edesc->dst_nents == 1) { 1146 dst_dma = sg_dma_address(req->dst); 1147 } else { 1148 dst_dma = edesc->sec4_sg_dma + 1149 sec4_sg_index * sizeof(struct sec4_sg_entry); 1150 out_options = LDST_SGF; 1151 } 1152 } 1153 append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options); 1154 } 1155 1156 /* 1157 * Fill in ablkcipher givencrypt job descriptor 1158 */ 1159 static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr, 1160 struct ablkcipher_edesc *edesc, 1161 struct ablkcipher_request *req, 1162 bool iv_contig) 1163 { 1164 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1165 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1166 u32 *desc = edesc->hw_desc; 1167 u32 out_options, in_options; 1168 dma_addr_t dst_dma, src_dma; 1169 int len, sec4_sg_index = 0; 1170 1171 #ifdef DEBUG 1172 print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ", 1173 DUMP_PREFIX_ADDRESS, 16, 4, req->info, 1174 ivsize, 1); 1175 #endif 1176 caam_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ", 1177 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1178 edesc->src_nents > 1 ? 100 : req->nbytes, 1); 1179 1180 len = desc_len(sh_desc); 1181 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE); 1182 1183 if (edesc->src_nents == 1) { 1184 src_dma = sg_dma_address(req->src); 1185 in_options = 0; 1186 } else { 1187 src_dma = edesc->sec4_sg_dma; 1188 sec4_sg_index += edesc->src_nents; 1189 in_options = LDST_SGF; 1190 } 1191 append_seq_in_ptr(desc, src_dma, req->nbytes, in_options); 1192 1193 if (iv_contig) { 1194 dst_dma = edesc->iv_dma; 1195 out_options = 0; 1196 } else { 1197 dst_dma = edesc->sec4_sg_dma + 1198 sec4_sg_index * sizeof(struct sec4_sg_entry); 1199 out_options = LDST_SGF; 1200 } 1201 append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options); 1202 } 1203 1204 /* 1205 * allocate and map the aead extended descriptor 1206 */ 1207 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req, 1208 int desc_bytes, bool *all_contig_ptr, 1209 bool encrypt) 1210 { 1211 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1212 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1213 struct device *jrdev = ctx->jrdev; 1214 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1215 GFP_KERNEL : GFP_ATOMIC; 1216 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1217 struct aead_edesc *edesc; 1218 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes; 1219 unsigned int authsize = ctx->authsize; 1220 1221 if (unlikely(req->dst != req->src)) { 1222 src_nents = sg_nents_for_len(req->src, req->assoclen + 1223 req->cryptlen); 1224 if (unlikely(src_nents < 0)) { 1225 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1226 req->assoclen + req->cryptlen); 1227 return ERR_PTR(src_nents); 1228 } 1229 1230 dst_nents = sg_nents_for_len(req->dst, req->assoclen + 1231 req->cryptlen + 1232 (encrypt ? authsize : 1233 (-authsize))); 1234 if (unlikely(dst_nents < 0)) { 1235 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1236 req->assoclen + req->cryptlen + 1237 (encrypt ? authsize : (-authsize))); 1238 return ERR_PTR(dst_nents); 1239 } 1240 } else { 1241 src_nents = sg_nents_for_len(req->src, req->assoclen + 1242 req->cryptlen + 1243 (encrypt ? authsize : 0)); 1244 if (unlikely(src_nents < 0)) { 1245 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1246 req->assoclen + req->cryptlen + 1247 (encrypt ? authsize : 0)); 1248 return ERR_PTR(src_nents); 1249 } 1250 } 1251 1252 if (likely(req->src == req->dst)) { 1253 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1254 DMA_BIDIRECTIONAL); 1255 if (unlikely(!mapped_src_nents)) { 1256 dev_err(jrdev, "unable to map source\n"); 1257 return ERR_PTR(-ENOMEM); 1258 } 1259 } else { 1260 /* Cover also the case of null (zero length) input data */ 1261 if (src_nents) { 1262 mapped_src_nents = dma_map_sg(jrdev, req->src, 1263 src_nents, DMA_TO_DEVICE); 1264 if (unlikely(!mapped_src_nents)) { 1265 dev_err(jrdev, "unable to map source\n"); 1266 return ERR_PTR(-ENOMEM); 1267 } 1268 } else { 1269 mapped_src_nents = 0; 1270 } 1271 1272 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1273 DMA_FROM_DEVICE); 1274 if (unlikely(!mapped_dst_nents)) { 1275 dev_err(jrdev, "unable to map destination\n"); 1276 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1277 return ERR_PTR(-ENOMEM); 1278 } 1279 } 1280 1281 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0; 1282 sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0; 1283 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry); 1284 1285 /* allocate space for base edesc and hw desc commands, link tables */ 1286 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1287 GFP_DMA | flags); 1288 if (!edesc) { 1289 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1290 0, 0, 0); 1291 return ERR_PTR(-ENOMEM); 1292 } 1293 1294 edesc->src_nents = src_nents; 1295 edesc->dst_nents = dst_nents; 1296 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) + 1297 desc_bytes; 1298 *all_contig_ptr = !(mapped_src_nents > 1); 1299 1300 sec4_sg_index = 0; 1301 if (mapped_src_nents > 1) { 1302 sg_to_sec4_sg_last(req->src, mapped_src_nents, 1303 edesc->sec4_sg + sec4_sg_index, 0); 1304 sec4_sg_index += mapped_src_nents; 1305 } 1306 if (mapped_dst_nents > 1) { 1307 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1308 edesc->sec4_sg + sec4_sg_index, 0); 1309 } 1310 1311 if (!sec4_sg_bytes) 1312 return edesc; 1313 1314 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1315 sec4_sg_bytes, DMA_TO_DEVICE); 1316 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1317 dev_err(jrdev, "unable to map S/G table\n"); 1318 aead_unmap(jrdev, edesc, req); 1319 kfree(edesc); 1320 return ERR_PTR(-ENOMEM); 1321 } 1322 1323 edesc->sec4_sg_bytes = sec4_sg_bytes; 1324 1325 return edesc; 1326 } 1327 1328 static int gcm_encrypt(struct aead_request *req) 1329 { 1330 struct aead_edesc *edesc; 1331 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1332 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1333 struct device *jrdev = ctx->jrdev; 1334 bool all_contig; 1335 u32 *desc; 1336 int ret = 0; 1337 1338 /* allocate extended descriptor */ 1339 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true); 1340 if (IS_ERR(edesc)) 1341 return PTR_ERR(edesc); 1342 1343 /* Create and submit job descriptor */ 1344 init_gcm_job(req, edesc, all_contig, true); 1345 #ifdef DEBUG 1346 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1347 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1348 desc_bytes(edesc->hw_desc), 1); 1349 #endif 1350 1351 desc = edesc->hw_desc; 1352 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1353 if (!ret) { 1354 ret = -EINPROGRESS; 1355 } else { 1356 aead_unmap(jrdev, edesc, req); 1357 kfree(edesc); 1358 } 1359 1360 return ret; 1361 } 1362 1363 static int ipsec_gcm_encrypt(struct aead_request *req) 1364 { 1365 if (req->assoclen < 8) 1366 return -EINVAL; 1367 1368 return gcm_encrypt(req); 1369 } 1370 1371 static int aead_encrypt(struct aead_request *req) 1372 { 1373 struct aead_edesc *edesc; 1374 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1375 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1376 struct device *jrdev = ctx->jrdev; 1377 bool all_contig; 1378 u32 *desc; 1379 int ret = 0; 1380 1381 /* allocate extended descriptor */ 1382 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1383 &all_contig, true); 1384 if (IS_ERR(edesc)) 1385 return PTR_ERR(edesc); 1386 1387 /* Create and submit job descriptor */ 1388 init_authenc_job(req, edesc, all_contig, true); 1389 #ifdef DEBUG 1390 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1391 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1392 desc_bytes(edesc->hw_desc), 1); 1393 #endif 1394 1395 desc = edesc->hw_desc; 1396 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); 1397 if (!ret) { 1398 ret = -EINPROGRESS; 1399 } else { 1400 aead_unmap(jrdev, edesc, req); 1401 kfree(edesc); 1402 } 1403 1404 return ret; 1405 } 1406 1407 static int gcm_decrypt(struct aead_request *req) 1408 { 1409 struct aead_edesc *edesc; 1410 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1411 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1412 struct device *jrdev = ctx->jrdev; 1413 bool all_contig; 1414 u32 *desc; 1415 int ret = 0; 1416 1417 /* allocate extended descriptor */ 1418 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false); 1419 if (IS_ERR(edesc)) 1420 return PTR_ERR(edesc); 1421 1422 /* Create and submit job descriptor*/ 1423 init_gcm_job(req, edesc, all_contig, false); 1424 #ifdef DEBUG 1425 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1426 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1427 desc_bytes(edesc->hw_desc), 1); 1428 #endif 1429 1430 desc = edesc->hw_desc; 1431 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1432 if (!ret) { 1433 ret = -EINPROGRESS; 1434 } else { 1435 aead_unmap(jrdev, edesc, req); 1436 kfree(edesc); 1437 } 1438 1439 return ret; 1440 } 1441 1442 static int ipsec_gcm_decrypt(struct aead_request *req) 1443 { 1444 if (req->assoclen < 8) 1445 return -EINVAL; 1446 1447 return gcm_decrypt(req); 1448 } 1449 1450 static int aead_decrypt(struct aead_request *req) 1451 { 1452 struct aead_edesc *edesc; 1453 struct crypto_aead *aead = crypto_aead_reqtfm(req); 1454 struct caam_ctx *ctx = crypto_aead_ctx(aead); 1455 struct device *jrdev = ctx->jrdev; 1456 bool all_contig; 1457 u32 *desc; 1458 int ret = 0; 1459 1460 caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ", 1461 DUMP_PREFIX_ADDRESS, 16, 4, req->src, 1462 req->assoclen + req->cryptlen, 1); 1463 1464 /* allocate extended descriptor */ 1465 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN, 1466 &all_contig, false); 1467 if (IS_ERR(edesc)) 1468 return PTR_ERR(edesc); 1469 1470 /* Create and submit job descriptor*/ 1471 init_authenc_job(req, edesc, all_contig, false); 1472 #ifdef DEBUG 1473 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ", 1474 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1475 desc_bytes(edesc->hw_desc), 1); 1476 #endif 1477 1478 desc = edesc->hw_desc; 1479 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); 1480 if (!ret) { 1481 ret = -EINPROGRESS; 1482 } else { 1483 aead_unmap(jrdev, edesc, req); 1484 kfree(edesc); 1485 } 1486 1487 return ret; 1488 } 1489 1490 /* 1491 * allocate and map the ablkcipher extended descriptor for ablkcipher 1492 */ 1493 static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request 1494 *req, int desc_bytes, 1495 bool *iv_contig_out) 1496 { 1497 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1498 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1499 struct device *jrdev = ctx->jrdev; 1500 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1501 GFP_KERNEL : GFP_ATOMIC; 1502 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0; 1503 struct ablkcipher_edesc *edesc; 1504 dma_addr_t iv_dma = 0; 1505 bool in_contig; 1506 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1507 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1508 1509 src_nents = sg_nents_for_len(req->src, req->nbytes); 1510 if (unlikely(src_nents < 0)) { 1511 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1512 req->nbytes); 1513 return ERR_PTR(src_nents); 1514 } 1515 1516 if (req->dst != req->src) { 1517 dst_nents = sg_nents_for_len(req->dst, req->nbytes); 1518 if (unlikely(dst_nents < 0)) { 1519 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1520 req->nbytes); 1521 return ERR_PTR(dst_nents); 1522 } 1523 } 1524 1525 if (likely(req->src == req->dst)) { 1526 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1527 DMA_BIDIRECTIONAL); 1528 if (unlikely(!mapped_src_nents)) { 1529 dev_err(jrdev, "unable to map source\n"); 1530 return ERR_PTR(-ENOMEM); 1531 } 1532 } else { 1533 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1534 DMA_TO_DEVICE); 1535 if (unlikely(!mapped_src_nents)) { 1536 dev_err(jrdev, "unable to map source\n"); 1537 return ERR_PTR(-ENOMEM); 1538 } 1539 1540 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1541 DMA_FROM_DEVICE); 1542 if (unlikely(!mapped_dst_nents)) { 1543 dev_err(jrdev, "unable to map destination\n"); 1544 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1545 return ERR_PTR(-ENOMEM); 1546 } 1547 } 1548 1549 iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE); 1550 if (dma_mapping_error(jrdev, iv_dma)) { 1551 dev_err(jrdev, "unable to map IV\n"); 1552 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1553 0, 0, 0); 1554 return ERR_PTR(-ENOMEM); 1555 } 1556 1557 if (mapped_src_nents == 1 && 1558 iv_dma + ivsize == sg_dma_address(req->src)) { 1559 in_contig = true; 1560 sec4_sg_ents = 0; 1561 } else { 1562 in_contig = false; 1563 sec4_sg_ents = 1 + mapped_src_nents; 1564 } 1565 dst_sg_idx = sec4_sg_ents; 1566 sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0; 1567 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry); 1568 1569 /* allocate space for base edesc and hw desc commands, link tables */ 1570 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1571 GFP_DMA | flags); 1572 if (!edesc) { 1573 dev_err(jrdev, "could not allocate extended descriptor\n"); 1574 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1575 iv_dma, ivsize, 0, 0); 1576 return ERR_PTR(-ENOMEM); 1577 } 1578 1579 edesc->src_nents = src_nents; 1580 edesc->dst_nents = dst_nents; 1581 edesc->sec4_sg_bytes = sec4_sg_bytes; 1582 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) + 1583 desc_bytes; 1584 1585 if (!in_contig) { 1586 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0); 1587 sg_to_sec4_sg_last(req->src, mapped_src_nents, 1588 edesc->sec4_sg + 1, 0); 1589 } 1590 1591 if (mapped_dst_nents > 1) { 1592 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1593 edesc->sec4_sg + dst_sg_idx, 0); 1594 } 1595 1596 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1597 sec4_sg_bytes, DMA_TO_DEVICE); 1598 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1599 dev_err(jrdev, "unable to map S/G table\n"); 1600 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1601 iv_dma, ivsize, 0, 0); 1602 kfree(edesc); 1603 return ERR_PTR(-ENOMEM); 1604 } 1605 1606 edesc->iv_dma = iv_dma; 1607 1608 #ifdef DEBUG 1609 print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ", 1610 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, 1611 sec4_sg_bytes, 1); 1612 #endif 1613 1614 *iv_contig_out = in_contig; 1615 return edesc; 1616 } 1617 1618 static int ablkcipher_encrypt(struct ablkcipher_request *req) 1619 { 1620 struct ablkcipher_edesc *edesc; 1621 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1622 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1623 struct device *jrdev = ctx->jrdev; 1624 bool iv_contig; 1625 u32 *desc; 1626 int ret = 0; 1627 1628 /* allocate extended descriptor */ 1629 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * 1630 CAAM_CMD_SZ, &iv_contig); 1631 if (IS_ERR(edesc)) 1632 return PTR_ERR(edesc); 1633 1634 /* Create and submit job descriptor*/ 1635 init_ablkcipher_job(ctx->sh_desc_enc, 1636 ctx->sh_desc_enc_dma, edesc, req, iv_contig); 1637 #ifdef DEBUG 1638 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ", 1639 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1640 desc_bytes(edesc->hw_desc), 1); 1641 #endif 1642 desc = edesc->hw_desc; 1643 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req); 1644 1645 if (!ret) { 1646 ret = -EINPROGRESS; 1647 } else { 1648 ablkcipher_unmap(jrdev, edesc, req); 1649 kfree(edesc); 1650 } 1651 1652 return ret; 1653 } 1654 1655 static int ablkcipher_decrypt(struct ablkcipher_request *req) 1656 { 1657 struct ablkcipher_edesc *edesc; 1658 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1659 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1660 struct device *jrdev = ctx->jrdev; 1661 bool iv_contig; 1662 u32 *desc; 1663 int ret = 0; 1664 1665 /* allocate extended descriptor */ 1666 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN * 1667 CAAM_CMD_SZ, &iv_contig); 1668 if (IS_ERR(edesc)) 1669 return PTR_ERR(edesc); 1670 1671 /* Create and submit job descriptor*/ 1672 init_ablkcipher_job(ctx->sh_desc_dec, 1673 ctx->sh_desc_dec_dma, edesc, req, iv_contig); 1674 desc = edesc->hw_desc; 1675 #ifdef DEBUG 1676 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ", 1677 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1678 desc_bytes(edesc->hw_desc), 1); 1679 #endif 1680 1681 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req); 1682 if (!ret) { 1683 ret = -EINPROGRESS; 1684 } else { 1685 ablkcipher_unmap(jrdev, edesc, req); 1686 kfree(edesc); 1687 } 1688 1689 return ret; 1690 } 1691 1692 /* 1693 * allocate and map the ablkcipher extended descriptor 1694 * for ablkcipher givencrypt 1695 */ 1696 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc( 1697 struct skcipher_givcrypt_request *greq, 1698 int desc_bytes, 1699 bool *iv_contig_out) 1700 { 1701 struct ablkcipher_request *req = &greq->creq; 1702 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1703 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1704 struct device *jrdev = ctx->jrdev; 1705 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 1706 GFP_KERNEL : GFP_ATOMIC; 1707 int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents; 1708 struct ablkcipher_edesc *edesc; 1709 dma_addr_t iv_dma = 0; 1710 bool out_contig; 1711 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); 1712 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes; 1713 1714 src_nents = sg_nents_for_len(req->src, req->nbytes); 1715 if (unlikely(src_nents < 0)) { 1716 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", 1717 req->nbytes); 1718 return ERR_PTR(src_nents); 1719 } 1720 1721 if (likely(req->src == req->dst)) { 1722 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1723 DMA_BIDIRECTIONAL); 1724 if (unlikely(!mapped_src_nents)) { 1725 dev_err(jrdev, "unable to map source\n"); 1726 return ERR_PTR(-ENOMEM); 1727 } 1728 1729 dst_nents = src_nents; 1730 mapped_dst_nents = src_nents; 1731 } else { 1732 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, 1733 DMA_TO_DEVICE); 1734 if (unlikely(!mapped_src_nents)) { 1735 dev_err(jrdev, "unable to map source\n"); 1736 return ERR_PTR(-ENOMEM); 1737 } 1738 1739 dst_nents = sg_nents_for_len(req->dst, req->nbytes); 1740 if (unlikely(dst_nents < 0)) { 1741 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", 1742 req->nbytes); 1743 return ERR_PTR(dst_nents); 1744 } 1745 1746 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, 1747 DMA_FROM_DEVICE); 1748 if (unlikely(!mapped_dst_nents)) { 1749 dev_err(jrdev, "unable to map destination\n"); 1750 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); 1751 return ERR_PTR(-ENOMEM); 1752 } 1753 } 1754 1755 /* 1756 * Check if iv can be contiguous with source and destination. 1757 * If so, include it. If not, create scatterlist. 1758 */ 1759 iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE); 1760 if (dma_mapping_error(jrdev, iv_dma)) { 1761 dev_err(jrdev, "unable to map IV\n"); 1762 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, 1763 0, 0, 0); 1764 return ERR_PTR(-ENOMEM); 1765 } 1766 1767 sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0; 1768 dst_sg_idx = sec4_sg_ents; 1769 if (mapped_dst_nents == 1 && 1770 iv_dma + ivsize == sg_dma_address(req->dst)) { 1771 out_contig = true; 1772 } else { 1773 out_contig = false; 1774 sec4_sg_ents += 1 + mapped_dst_nents; 1775 } 1776 1777 /* allocate space for base edesc and hw desc commands, link tables */ 1778 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry); 1779 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, 1780 GFP_DMA | flags); 1781 if (!edesc) { 1782 dev_err(jrdev, "could not allocate extended descriptor\n"); 1783 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1784 iv_dma, ivsize, 0, 0); 1785 return ERR_PTR(-ENOMEM); 1786 } 1787 1788 edesc->src_nents = src_nents; 1789 edesc->dst_nents = dst_nents; 1790 edesc->sec4_sg_bytes = sec4_sg_bytes; 1791 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) + 1792 desc_bytes; 1793 1794 if (mapped_src_nents > 1) 1795 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg, 1796 0); 1797 1798 if (!out_contig) { 1799 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx, 1800 iv_dma, ivsize, 0); 1801 sg_to_sec4_sg_last(req->dst, mapped_dst_nents, 1802 edesc->sec4_sg + dst_sg_idx + 1, 0); 1803 } 1804 1805 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, 1806 sec4_sg_bytes, DMA_TO_DEVICE); 1807 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { 1808 dev_err(jrdev, "unable to map S/G table\n"); 1809 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 1810 iv_dma, ivsize, 0, 0); 1811 kfree(edesc); 1812 return ERR_PTR(-ENOMEM); 1813 } 1814 edesc->iv_dma = iv_dma; 1815 1816 #ifdef DEBUG 1817 print_hex_dump(KERN_ERR, 1818 "ablkcipher sec4_sg@" __stringify(__LINE__) ": ", 1819 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg, 1820 sec4_sg_bytes, 1); 1821 #endif 1822 1823 *iv_contig_out = out_contig; 1824 return edesc; 1825 } 1826 1827 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq) 1828 { 1829 struct ablkcipher_request *req = &creq->creq; 1830 struct ablkcipher_edesc *edesc; 1831 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req); 1832 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher); 1833 struct device *jrdev = ctx->jrdev; 1834 bool iv_contig = false; 1835 u32 *desc; 1836 int ret = 0; 1837 1838 /* allocate extended descriptor */ 1839 edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN * 1840 CAAM_CMD_SZ, &iv_contig); 1841 if (IS_ERR(edesc)) 1842 return PTR_ERR(edesc); 1843 1844 /* Create and submit job descriptor*/ 1845 init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma, 1846 edesc, req, iv_contig); 1847 #ifdef DEBUG 1848 print_hex_dump(KERN_ERR, 1849 "ablkcipher jobdesc@" __stringify(__LINE__) ": ", 1850 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc, 1851 desc_bytes(edesc->hw_desc), 1); 1852 #endif 1853 desc = edesc->hw_desc; 1854 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req); 1855 1856 if (!ret) { 1857 ret = -EINPROGRESS; 1858 } else { 1859 ablkcipher_unmap(jrdev, edesc, req); 1860 kfree(edesc); 1861 } 1862 1863 return ret; 1864 } 1865 1866 #define template_aead template_u.aead 1867 #define template_ablkcipher template_u.ablkcipher 1868 struct caam_alg_template { 1869 char name[CRYPTO_MAX_ALG_NAME]; 1870 char driver_name[CRYPTO_MAX_ALG_NAME]; 1871 unsigned int blocksize; 1872 u32 type; 1873 union { 1874 struct ablkcipher_alg ablkcipher; 1875 } template_u; 1876 u32 class1_alg_type; 1877 u32 class2_alg_type; 1878 }; 1879 1880 static struct caam_alg_template driver_algs[] = { 1881 /* ablkcipher descriptor */ 1882 { 1883 .name = "cbc(aes)", 1884 .driver_name = "cbc-aes-caam", 1885 .blocksize = AES_BLOCK_SIZE, 1886 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1887 .template_ablkcipher = { 1888 .setkey = ablkcipher_setkey, 1889 .encrypt = ablkcipher_encrypt, 1890 .decrypt = ablkcipher_decrypt, 1891 .givencrypt = ablkcipher_givencrypt, 1892 .geniv = "<built-in>", 1893 .min_keysize = AES_MIN_KEY_SIZE, 1894 .max_keysize = AES_MAX_KEY_SIZE, 1895 .ivsize = AES_BLOCK_SIZE, 1896 }, 1897 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 1898 }, 1899 { 1900 .name = "cbc(des3_ede)", 1901 .driver_name = "cbc-3des-caam", 1902 .blocksize = DES3_EDE_BLOCK_SIZE, 1903 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1904 .template_ablkcipher = { 1905 .setkey = ablkcipher_setkey, 1906 .encrypt = ablkcipher_encrypt, 1907 .decrypt = ablkcipher_decrypt, 1908 .givencrypt = ablkcipher_givencrypt, 1909 .geniv = "<built-in>", 1910 .min_keysize = DES3_EDE_KEY_SIZE, 1911 .max_keysize = DES3_EDE_KEY_SIZE, 1912 .ivsize = DES3_EDE_BLOCK_SIZE, 1913 }, 1914 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 1915 }, 1916 { 1917 .name = "cbc(des)", 1918 .driver_name = "cbc-des-caam", 1919 .blocksize = DES_BLOCK_SIZE, 1920 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1921 .template_ablkcipher = { 1922 .setkey = ablkcipher_setkey, 1923 .encrypt = ablkcipher_encrypt, 1924 .decrypt = ablkcipher_decrypt, 1925 .givencrypt = ablkcipher_givencrypt, 1926 .geniv = "<built-in>", 1927 .min_keysize = DES_KEY_SIZE, 1928 .max_keysize = DES_KEY_SIZE, 1929 .ivsize = DES_BLOCK_SIZE, 1930 }, 1931 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 1932 }, 1933 { 1934 .name = "ctr(aes)", 1935 .driver_name = "ctr-aes-caam", 1936 .blocksize = 1, 1937 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1938 .template_ablkcipher = { 1939 .setkey = ablkcipher_setkey, 1940 .encrypt = ablkcipher_encrypt, 1941 .decrypt = ablkcipher_decrypt, 1942 .geniv = "chainiv", 1943 .min_keysize = AES_MIN_KEY_SIZE, 1944 .max_keysize = AES_MAX_KEY_SIZE, 1945 .ivsize = AES_BLOCK_SIZE, 1946 }, 1947 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128, 1948 }, 1949 { 1950 .name = "rfc3686(ctr(aes))", 1951 .driver_name = "rfc3686-ctr-aes-caam", 1952 .blocksize = 1, 1953 .type = CRYPTO_ALG_TYPE_GIVCIPHER, 1954 .template_ablkcipher = { 1955 .setkey = ablkcipher_setkey, 1956 .encrypt = ablkcipher_encrypt, 1957 .decrypt = ablkcipher_decrypt, 1958 .givencrypt = ablkcipher_givencrypt, 1959 .geniv = "<built-in>", 1960 .min_keysize = AES_MIN_KEY_SIZE + 1961 CTR_RFC3686_NONCE_SIZE, 1962 .max_keysize = AES_MAX_KEY_SIZE + 1963 CTR_RFC3686_NONCE_SIZE, 1964 .ivsize = CTR_RFC3686_IV_SIZE, 1965 }, 1966 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128, 1967 }, 1968 { 1969 .name = "xts(aes)", 1970 .driver_name = "xts-aes-caam", 1971 .blocksize = AES_BLOCK_SIZE, 1972 .type = CRYPTO_ALG_TYPE_ABLKCIPHER, 1973 .template_ablkcipher = { 1974 .setkey = xts_ablkcipher_setkey, 1975 .encrypt = ablkcipher_encrypt, 1976 .decrypt = ablkcipher_decrypt, 1977 .geniv = "eseqiv", 1978 .min_keysize = 2 * AES_MIN_KEY_SIZE, 1979 .max_keysize = 2 * AES_MAX_KEY_SIZE, 1980 .ivsize = AES_BLOCK_SIZE, 1981 }, 1982 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS, 1983 }, 1984 }; 1985 1986 static struct caam_aead_alg driver_aeads[] = { 1987 { 1988 .aead = { 1989 .base = { 1990 .cra_name = "rfc4106(gcm(aes))", 1991 .cra_driver_name = "rfc4106-gcm-aes-caam", 1992 .cra_blocksize = 1, 1993 }, 1994 .setkey = rfc4106_setkey, 1995 .setauthsize = rfc4106_setauthsize, 1996 .encrypt = ipsec_gcm_encrypt, 1997 .decrypt = ipsec_gcm_decrypt, 1998 .ivsize = GCM_RFC4106_IV_SIZE, 1999 .maxauthsize = AES_BLOCK_SIZE, 2000 }, 2001 .caam = { 2002 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2003 }, 2004 }, 2005 { 2006 .aead = { 2007 .base = { 2008 .cra_name = "rfc4543(gcm(aes))", 2009 .cra_driver_name = "rfc4543-gcm-aes-caam", 2010 .cra_blocksize = 1, 2011 }, 2012 .setkey = rfc4543_setkey, 2013 .setauthsize = rfc4543_setauthsize, 2014 .encrypt = ipsec_gcm_encrypt, 2015 .decrypt = ipsec_gcm_decrypt, 2016 .ivsize = GCM_RFC4543_IV_SIZE, 2017 .maxauthsize = AES_BLOCK_SIZE, 2018 }, 2019 .caam = { 2020 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2021 }, 2022 }, 2023 /* Galois Counter Mode */ 2024 { 2025 .aead = { 2026 .base = { 2027 .cra_name = "gcm(aes)", 2028 .cra_driver_name = "gcm-aes-caam", 2029 .cra_blocksize = 1, 2030 }, 2031 .setkey = gcm_setkey, 2032 .setauthsize = gcm_setauthsize, 2033 .encrypt = gcm_encrypt, 2034 .decrypt = gcm_decrypt, 2035 .ivsize = GCM_AES_IV_SIZE, 2036 .maxauthsize = AES_BLOCK_SIZE, 2037 }, 2038 .caam = { 2039 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM, 2040 }, 2041 }, 2042 /* single-pass ipsec_esp descriptor */ 2043 { 2044 .aead = { 2045 .base = { 2046 .cra_name = "authenc(hmac(md5)," 2047 "ecb(cipher_null))", 2048 .cra_driver_name = "authenc-hmac-md5-" 2049 "ecb-cipher_null-caam", 2050 .cra_blocksize = NULL_BLOCK_SIZE, 2051 }, 2052 .setkey = aead_setkey, 2053 .setauthsize = aead_setauthsize, 2054 .encrypt = aead_encrypt, 2055 .decrypt = aead_decrypt, 2056 .ivsize = NULL_IV_SIZE, 2057 .maxauthsize = MD5_DIGEST_SIZE, 2058 }, 2059 .caam = { 2060 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2061 OP_ALG_AAI_HMAC_PRECOMP, 2062 }, 2063 }, 2064 { 2065 .aead = { 2066 .base = { 2067 .cra_name = "authenc(hmac(sha1)," 2068 "ecb(cipher_null))", 2069 .cra_driver_name = "authenc-hmac-sha1-" 2070 "ecb-cipher_null-caam", 2071 .cra_blocksize = NULL_BLOCK_SIZE, 2072 }, 2073 .setkey = aead_setkey, 2074 .setauthsize = aead_setauthsize, 2075 .encrypt = aead_encrypt, 2076 .decrypt = aead_decrypt, 2077 .ivsize = NULL_IV_SIZE, 2078 .maxauthsize = SHA1_DIGEST_SIZE, 2079 }, 2080 .caam = { 2081 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2082 OP_ALG_AAI_HMAC_PRECOMP, 2083 }, 2084 }, 2085 { 2086 .aead = { 2087 .base = { 2088 .cra_name = "authenc(hmac(sha224)," 2089 "ecb(cipher_null))", 2090 .cra_driver_name = "authenc-hmac-sha224-" 2091 "ecb-cipher_null-caam", 2092 .cra_blocksize = NULL_BLOCK_SIZE, 2093 }, 2094 .setkey = aead_setkey, 2095 .setauthsize = aead_setauthsize, 2096 .encrypt = aead_encrypt, 2097 .decrypt = aead_decrypt, 2098 .ivsize = NULL_IV_SIZE, 2099 .maxauthsize = SHA224_DIGEST_SIZE, 2100 }, 2101 .caam = { 2102 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2103 OP_ALG_AAI_HMAC_PRECOMP, 2104 }, 2105 }, 2106 { 2107 .aead = { 2108 .base = { 2109 .cra_name = "authenc(hmac(sha256)," 2110 "ecb(cipher_null))", 2111 .cra_driver_name = "authenc-hmac-sha256-" 2112 "ecb-cipher_null-caam", 2113 .cra_blocksize = NULL_BLOCK_SIZE, 2114 }, 2115 .setkey = aead_setkey, 2116 .setauthsize = aead_setauthsize, 2117 .encrypt = aead_encrypt, 2118 .decrypt = aead_decrypt, 2119 .ivsize = NULL_IV_SIZE, 2120 .maxauthsize = SHA256_DIGEST_SIZE, 2121 }, 2122 .caam = { 2123 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2124 OP_ALG_AAI_HMAC_PRECOMP, 2125 }, 2126 }, 2127 { 2128 .aead = { 2129 .base = { 2130 .cra_name = "authenc(hmac(sha384)," 2131 "ecb(cipher_null))", 2132 .cra_driver_name = "authenc-hmac-sha384-" 2133 "ecb-cipher_null-caam", 2134 .cra_blocksize = NULL_BLOCK_SIZE, 2135 }, 2136 .setkey = aead_setkey, 2137 .setauthsize = aead_setauthsize, 2138 .encrypt = aead_encrypt, 2139 .decrypt = aead_decrypt, 2140 .ivsize = NULL_IV_SIZE, 2141 .maxauthsize = SHA384_DIGEST_SIZE, 2142 }, 2143 .caam = { 2144 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2145 OP_ALG_AAI_HMAC_PRECOMP, 2146 }, 2147 }, 2148 { 2149 .aead = { 2150 .base = { 2151 .cra_name = "authenc(hmac(sha512)," 2152 "ecb(cipher_null))", 2153 .cra_driver_name = "authenc-hmac-sha512-" 2154 "ecb-cipher_null-caam", 2155 .cra_blocksize = NULL_BLOCK_SIZE, 2156 }, 2157 .setkey = aead_setkey, 2158 .setauthsize = aead_setauthsize, 2159 .encrypt = aead_encrypt, 2160 .decrypt = aead_decrypt, 2161 .ivsize = NULL_IV_SIZE, 2162 .maxauthsize = SHA512_DIGEST_SIZE, 2163 }, 2164 .caam = { 2165 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2166 OP_ALG_AAI_HMAC_PRECOMP, 2167 }, 2168 }, 2169 { 2170 .aead = { 2171 .base = { 2172 .cra_name = "authenc(hmac(md5),cbc(aes))", 2173 .cra_driver_name = "authenc-hmac-md5-" 2174 "cbc-aes-caam", 2175 .cra_blocksize = AES_BLOCK_SIZE, 2176 }, 2177 .setkey = aead_setkey, 2178 .setauthsize = aead_setauthsize, 2179 .encrypt = aead_encrypt, 2180 .decrypt = aead_decrypt, 2181 .ivsize = AES_BLOCK_SIZE, 2182 .maxauthsize = MD5_DIGEST_SIZE, 2183 }, 2184 .caam = { 2185 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2186 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2187 OP_ALG_AAI_HMAC_PRECOMP, 2188 }, 2189 }, 2190 { 2191 .aead = { 2192 .base = { 2193 .cra_name = "echainiv(authenc(hmac(md5)," 2194 "cbc(aes)))", 2195 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2196 "cbc-aes-caam", 2197 .cra_blocksize = AES_BLOCK_SIZE, 2198 }, 2199 .setkey = aead_setkey, 2200 .setauthsize = aead_setauthsize, 2201 .encrypt = aead_encrypt, 2202 .decrypt = aead_decrypt, 2203 .ivsize = AES_BLOCK_SIZE, 2204 .maxauthsize = MD5_DIGEST_SIZE, 2205 }, 2206 .caam = { 2207 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2208 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2209 OP_ALG_AAI_HMAC_PRECOMP, 2210 .geniv = true, 2211 }, 2212 }, 2213 { 2214 .aead = { 2215 .base = { 2216 .cra_name = "authenc(hmac(sha1),cbc(aes))", 2217 .cra_driver_name = "authenc-hmac-sha1-" 2218 "cbc-aes-caam", 2219 .cra_blocksize = AES_BLOCK_SIZE, 2220 }, 2221 .setkey = aead_setkey, 2222 .setauthsize = aead_setauthsize, 2223 .encrypt = aead_encrypt, 2224 .decrypt = aead_decrypt, 2225 .ivsize = AES_BLOCK_SIZE, 2226 .maxauthsize = SHA1_DIGEST_SIZE, 2227 }, 2228 .caam = { 2229 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2230 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2231 OP_ALG_AAI_HMAC_PRECOMP, 2232 }, 2233 }, 2234 { 2235 .aead = { 2236 .base = { 2237 .cra_name = "echainiv(authenc(hmac(sha1)," 2238 "cbc(aes)))", 2239 .cra_driver_name = "echainiv-authenc-" 2240 "hmac-sha1-cbc-aes-caam", 2241 .cra_blocksize = AES_BLOCK_SIZE, 2242 }, 2243 .setkey = aead_setkey, 2244 .setauthsize = aead_setauthsize, 2245 .encrypt = aead_encrypt, 2246 .decrypt = aead_decrypt, 2247 .ivsize = AES_BLOCK_SIZE, 2248 .maxauthsize = SHA1_DIGEST_SIZE, 2249 }, 2250 .caam = { 2251 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2252 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2253 OP_ALG_AAI_HMAC_PRECOMP, 2254 .geniv = true, 2255 }, 2256 }, 2257 { 2258 .aead = { 2259 .base = { 2260 .cra_name = "authenc(hmac(sha224),cbc(aes))", 2261 .cra_driver_name = "authenc-hmac-sha224-" 2262 "cbc-aes-caam", 2263 .cra_blocksize = AES_BLOCK_SIZE, 2264 }, 2265 .setkey = aead_setkey, 2266 .setauthsize = aead_setauthsize, 2267 .encrypt = aead_encrypt, 2268 .decrypt = aead_decrypt, 2269 .ivsize = AES_BLOCK_SIZE, 2270 .maxauthsize = SHA224_DIGEST_SIZE, 2271 }, 2272 .caam = { 2273 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2274 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2275 OP_ALG_AAI_HMAC_PRECOMP, 2276 }, 2277 }, 2278 { 2279 .aead = { 2280 .base = { 2281 .cra_name = "echainiv(authenc(hmac(sha224)," 2282 "cbc(aes)))", 2283 .cra_driver_name = "echainiv-authenc-" 2284 "hmac-sha224-cbc-aes-caam", 2285 .cra_blocksize = AES_BLOCK_SIZE, 2286 }, 2287 .setkey = aead_setkey, 2288 .setauthsize = aead_setauthsize, 2289 .encrypt = aead_encrypt, 2290 .decrypt = aead_decrypt, 2291 .ivsize = AES_BLOCK_SIZE, 2292 .maxauthsize = SHA224_DIGEST_SIZE, 2293 }, 2294 .caam = { 2295 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2296 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2297 OP_ALG_AAI_HMAC_PRECOMP, 2298 .geniv = true, 2299 }, 2300 }, 2301 { 2302 .aead = { 2303 .base = { 2304 .cra_name = "authenc(hmac(sha256),cbc(aes))", 2305 .cra_driver_name = "authenc-hmac-sha256-" 2306 "cbc-aes-caam", 2307 .cra_blocksize = AES_BLOCK_SIZE, 2308 }, 2309 .setkey = aead_setkey, 2310 .setauthsize = aead_setauthsize, 2311 .encrypt = aead_encrypt, 2312 .decrypt = aead_decrypt, 2313 .ivsize = AES_BLOCK_SIZE, 2314 .maxauthsize = SHA256_DIGEST_SIZE, 2315 }, 2316 .caam = { 2317 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2318 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2319 OP_ALG_AAI_HMAC_PRECOMP, 2320 }, 2321 }, 2322 { 2323 .aead = { 2324 .base = { 2325 .cra_name = "echainiv(authenc(hmac(sha256)," 2326 "cbc(aes)))", 2327 .cra_driver_name = "echainiv-authenc-" 2328 "hmac-sha256-cbc-aes-caam", 2329 .cra_blocksize = AES_BLOCK_SIZE, 2330 }, 2331 .setkey = aead_setkey, 2332 .setauthsize = aead_setauthsize, 2333 .encrypt = aead_encrypt, 2334 .decrypt = aead_decrypt, 2335 .ivsize = AES_BLOCK_SIZE, 2336 .maxauthsize = SHA256_DIGEST_SIZE, 2337 }, 2338 .caam = { 2339 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2340 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2341 OP_ALG_AAI_HMAC_PRECOMP, 2342 .geniv = true, 2343 }, 2344 }, 2345 { 2346 .aead = { 2347 .base = { 2348 .cra_name = "authenc(hmac(sha384),cbc(aes))", 2349 .cra_driver_name = "authenc-hmac-sha384-" 2350 "cbc-aes-caam", 2351 .cra_blocksize = AES_BLOCK_SIZE, 2352 }, 2353 .setkey = aead_setkey, 2354 .setauthsize = aead_setauthsize, 2355 .encrypt = aead_encrypt, 2356 .decrypt = aead_decrypt, 2357 .ivsize = AES_BLOCK_SIZE, 2358 .maxauthsize = SHA384_DIGEST_SIZE, 2359 }, 2360 .caam = { 2361 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2362 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2363 OP_ALG_AAI_HMAC_PRECOMP, 2364 }, 2365 }, 2366 { 2367 .aead = { 2368 .base = { 2369 .cra_name = "echainiv(authenc(hmac(sha384)," 2370 "cbc(aes)))", 2371 .cra_driver_name = "echainiv-authenc-" 2372 "hmac-sha384-cbc-aes-caam", 2373 .cra_blocksize = AES_BLOCK_SIZE, 2374 }, 2375 .setkey = aead_setkey, 2376 .setauthsize = aead_setauthsize, 2377 .encrypt = aead_encrypt, 2378 .decrypt = aead_decrypt, 2379 .ivsize = AES_BLOCK_SIZE, 2380 .maxauthsize = SHA384_DIGEST_SIZE, 2381 }, 2382 .caam = { 2383 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2384 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2385 OP_ALG_AAI_HMAC_PRECOMP, 2386 .geniv = true, 2387 }, 2388 }, 2389 { 2390 .aead = { 2391 .base = { 2392 .cra_name = "authenc(hmac(sha512),cbc(aes))", 2393 .cra_driver_name = "authenc-hmac-sha512-" 2394 "cbc-aes-caam", 2395 .cra_blocksize = AES_BLOCK_SIZE, 2396 }, 2397 .setkey = aead_setkey, 2398 .setauthsize = aead_setauthsize, 2399 .encrypt = aead_encrypt, 2400 .decrypt = aead_decrypt, 2401 .ivsize = AES_BLOCK_SIZE, 2402 .maxauthsize = SHA512_DIGEST_SIZE, 2403 }, 2404 .caam = { 2405 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2406 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2407 OP_ALG_AAI_HMAC_PRECOMP, 2408 }, 2409 }, 2410 { 2411 .aead = { 2412 .base = { 2413 .cra_name = "echainiv(authenc(hmac(sha512)," 2414 "cbc(aes)))", 2415 .cra_driver_name = "echainiv-authenc-" 2416 "hmac-sha512-cbc-aes-caam", 2417 .cra_blocksize = AES_BLOCK_SIZE, 2418 }, 2419 .setkey = aead_setkey, 2420 .setauthsize = aead_setauthsize, 2421 .encrypt = aead_encrypt, 2422 .decrypt = aead_decrypt, 2423 .ivsize = AES_BLOCK_SIZE, 2424 .maxauthsize = SHA512_DIGEST_SIZE, 2425 }, 2426 .caam = { 2427 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC, 2428 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2429 OP_ALG_AAI_HMAC_PRECOMP, 2430 .geniv = true, 2431 }, 2432 }, 2433 { 2434 .aead = { 2435 .base = { 2436 .cra_name = "authenc(hmac(md5),cbc(des3_ede))", 2437 .cra_driver_name = "authenc-hmac-md5-" 2438 "cbc-des3_ede-caam", 2439 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2440 }, 2441 .setkey = aead_setkey, 2442 .setauthsize = aead_setauthsize, 2443 .encrypt = aead_encrypt, 2444 .decrypt = aead_decrypt, 2445 .ivsize = DES3_EDE_BLOCK_SIZE, 2446 .maxauthsize = MD5_DIGEST_SIZE, 2447 }, 2448 .caam = { 2449 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2450 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2451 OP_ALG_AAI_HMAC_PRECOMP, 2452 } 2453 }, 2454 { 2455 .aead = { 2456 .base = { 2457 .cra_name = "echainiv(authenc(hmac(md5)," 2458 "cbc(des3_ede)))", 2459 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2460 "cbc-des3_ede-caam", 2461 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2462 }, 2463 .setkey = aead_setkey, 2464 .setauthsize = aead_setauthsize, 2465 .encrypt = aead_encrypt, 2466 .decrypt = aead_decrypt, 2467 .ivsize = DES3_EDE_BLOCK_SIZE, 2468 .maxauthsize = MD5_DIGEST_SIZE, 2469 }, 2470 .caam = { 2471 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2472 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2473 OP_ALG_AAI_HMAC_PRECOMP, 2474 .geniv = true, 2475 } 2476 }, 2477 { 2478 .aead = { 2479 .base = { 2480 .cra_name = "authenc(hmac(sha1)," 2481 "cbc(des3_ede))", 2482 .cra_driver_name = "authenc-hmac-sha1-" 2483 "cbc-des3_ede-caam", 2484 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2485 }, 2486 .setkey = aead_setkey, 2487 .setauthsize = aead_setauthsize, 2488 .encrypt = aead_encrypt, 2489 .decrypt = aead_decrypt, 2490 .ivsize = DES3_EDE_BLOCK_SIZE, 2491 .maxauthsize = SHA1_DIGEST_SIZE, 2492 }, 2493 .caam = { 2494 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2495 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2496 OP_ALG_AAI_HMAC_PRECOMP, 2497 }, 2498 }, 2499 { 2500 .aead = { 2501 .base = { 2502 .cra_name = "echainiv(authenc(hmac(sha1)," 2503 "cbc(des3_ede)))", 2504 .cra_driver_name = "echainiv-authenc-" 2505 "hmac-sha1-" 2506 "cbc-des3_ede-caam", 2507 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2508 }, 2509 .setkey = aead_setkey, 2510 .setauthsize = aead_setauthsize, 2511 .encrypt = aead_encrypt, 2512 .decrypt = aead_decrypt, 2513 .ivsize = DES3_EDE_BLOCK_SIZE, 2514 .maxauthsize = SHA1_DIGEST_SIZE, 2515 }, 2516 .caam = { 2517 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2518 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2519 OP_ALG_AAI_HMAC_PRECOMP, 2520 .geniv = true, 2521 }, 2522 }, 2523 { 2524 .aead = { 2525 .base = { 2526 .cra_name = "authenc(hmac(sha224)," 2527 "cbc(des3_ede))", 2528 .cra_driver_name = "authenc-hmac-sha224-" 2529 "cbc-des3_ede-caam", 2530 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2531 }, 2532 .setkey = aead_setkey, 2533 .setauthsize = aead_setauthsize, 2534 .encrypt = aead_encrypt, 2535 .decrypt = aead_decrypt, 2536 .ivsize = DES3_EDE_BLOCK_SIZE, 2537 .maxauthsize = SHA224_DIGEST_SIZE, 2538 }, 2539 .caam = { 2540 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2541 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2542 OP_ALG_AAI_HMAC_PRECOMP, 2543 }, 2544 }, 2545 { 2546 .aead = { 2547 .base = { 2548 .cra_name = "echainiv(authenc(hmac(sha224)," 2549 "cbc(des3_ede)))", 2550 .cra_driver_name = "echainiv-authenc-" 2551 "hmac-sha224-" 2552 "cbc-des3_ede-caam", 2553 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2554 }, 2555 .setkey = aead_setkey, 2556 .setauthsize = aead_setauthsize, 2557 .encrypt = aead_encrypt, 2558 .decrypt = aead_decrypt, 2559 .ivsize = DES3_EDE_BLOCK_SIZE, 2560 .maxauthsize = SHA224_DIGEST_SIZE, 2561 }, 2562 .caam = { 2563 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2564 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2565 OP_ALG_AAI_HMAC_PRECOMP, 2566 .geniv = true, 2567 }, 2568 }, 2569 { 2570 .aead = { 2571 .base = { 2572 .cra_name = "authenc(hmac(sha256)," 2573 "cbc(des3_ede))", 2574 .cra_driver_name = "authenc-hmac-sha256-" 2575 "cbc-des3_ede-caam", 2576 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2577 }, 2578 .setkey = aead_setkey, 2579 .setauthsize = aead_setauthsize, 2580 .encrypt = aead_encrypt, 2581 .decrypt = aead_decrypt, 2582 .ivsize = DES3_EDE_BLOCK_SIZE, 2583 .maxauthsize = SHA256_DIGEST_SIZE, 2584 }, 2585 .caam = { 2586 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2587 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2588 OP_ALG_AAI_HMAC_PRECOMP, 2589 }, 2590 }, 2591 { 2592 .aead = { 2593 .base = { 2594 .cra_name = "echainiv(authenc(hmac(sha256)," 2595 "cbc(des3_ede)))", 2596 .cra_driver_name = "echainiv-authenc-" 2597 "hmac-sha256-" 2598 "cbc-des3_ede-caam", 2599 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2600 }, 2601 .setkey = aead_setkey, 2602 .setauthsize = aead_setauthsize, 2603 .encrypt = aead_encrypt, 2604 .decrypt = aead_decrypt, 2605 .ivsize = DES3_EDE_BLOCK_SIZE, 2606 .maxauthsize = SHA256_DIGEST_SIZE, 2607 }, 2608 .caam = { 2609 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2610 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2611 OP_ALG_AAI_HMAC_PRECOMP, 2612 .geniv = true, 2613 }, 2614 }, 2615 { 2616 .aead = { 2617 .base = { 2618 .cra_name = "authenc(hmac(sha384)," 2619 "cbc(des3_ede))", 2620 .cra_driver_name = "authenc-hmac-sha384-" 2621 "cbc-des3_ede-caam", 2622 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2623 }, 2624 .setkey = aead_setkey, 2625 .setauthsize = aead_setauthsize, 2626 .encrypt = aead_encrypt, 2627 .decrypt = aead_decrypt, 2628 .ivsize = DES3_EDE_BLOCK_SIZE, 2629 .maxauthsize = SHA384_DIGEST_SIZE, 2630 }, 2631 .caam = { 2632 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2633 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2634 OP_ALG_AAI_HMAC_PRECOMP, 2635 }, 2636 }, 2637 { 2638 .aead = { 2639 .base = { 2640 .cra_name = "echainiv(authenc(hmac(sha384)," 2641 "cbc(des3_ede)))", 2642 .cra_driver_name = "echainiv-authenc-" 2643 "hmac-sha384-" 2644 "cbc-des3_ede-caam", 2645 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2646 }, 2647 .setkey = aead_setkey, 2648 .setauthsize = aead_setauthsize, 2649 .encrypt = aead_encrypt, 2650 .decrypt = aead_decrypt, 2651 .ivsize = DES3_EDE_BLOCK_SIZE, 2652 .maxauthsize = SHA384_DIGEST_SIZE, 2653 }, 2654 .caam = { 2655 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2656 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2657 OP_ALG_AAI_HMAC_PRECOMP, 2658 .geniv = true, 2659 }, 2660 }, 2661 { 2662 .aead = { 2663 .base = { 2664 .cra_name = "authenc(hmac(sha512)," 2665 "cbc(des3_ede))", 2666 .cra_driver_name = "authenc-hmac-sha512-" 2667 "cbc-des3_ede-caam", 2668 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2669 }, 2670 .setkey = aead_setkey, 2671 .setauthsize = aead_setauthsize, 2672 .encrypt = aead_encrypt, 2673 .decrypt = aead_decrypt, 2674 .ivsize = DES3_EDE_BLOCK_SIZE, 2675 .maxauthsize = SHA512_DIGEST_SIZE, 2676 }, 2677 .caam = { 2678 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2679 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2680 OP_ALG_AAI_HMAC_PRECOMP, 2681 }, 2682 }, 2683 { 2684 .aead = { 2685 .base = { 2686 .cra_name = "echainiv(authenc(hmac(sha512)," 2687 "cbc(des3_ede)))", 2688 .cra_driver_name = "echainiv-authenc-" 2689 "hmac-sha512-" 2690 "cbc-des3_ede-caam", 2691 .cra_blocksize = DES3_EDE_BLOCK_SIZE, 2692 }, 2693 .setkey = aead_setkey, 2694 .setauthsize = aead_setauthsize, 2695 .encrypt = aead_encrypt, 2696 .decrypt = aead_decrypt, 2697 .ivsize = DES3_EDE_BLOCK_SIZE, 2698 .maxauthsize = SHA512_DIGEST_SIZE, 2699 }, 2700 .caam = { 2701 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC, 2702 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2703 OP_ALG_AAI_HMAC_PRECOMP, 2704 .geniv = true, 2705 }, 2706 }, 2707 { 2708 .aead = { 2709 .base = { 2710 .cra_name = "authenc(hmac(md5),cbc(des))", 2711 .cra_driver_name = "authenc-hmac-md5-" 2712 "cbc-des-caam", 2713 .cra_blocksize = DES_BLOCK_SIZE, 2714 }, 2715 .setkey = aead_setkey, 2716 .setauthsize = aead_setauthsize, 2717 .encrypt = aead_encrypt, 2718 .decrypt = aead_decrypt, 2719 .ivsize = DES_BLOCK_SIZE, 2720 .maxauthsize = MD5_DIGEST_SIZE, 2721 }, 2722 .caam = { 2723 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2724 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2725 OP_ALG_AAI_HMAC_PRECOMP, 2726 }, 2727 }, 2728 { 2729 .aead = { 2730 .base = { 2731 .cra_name = "echainiv(authenc(hmac(md5)," 2732 "cbc(des)))", 2733 .cra_driver_name = "echainiv-authenc-hmac-md5-" 2734 "cbc-des-caam", 2735 .cra_blocksize = DES_BLOCK_SIZE, 2736 }, 2737 .setkey = aead_setkey, 2738 .setauthsize = aead_setauthsize, 2739 .encrypt = aead_encrypt, 2740 .decrypt = aead_decrypt, 2741 .ivsize = DES_BLOCK_SIZE, 2742 .maxauthsize = MD5_DIGEST_SIZE, 2743 }, 2744 .caam = { 2745 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2746 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2747 OP_ALG_AAI_HMAC_PRECOMP, 2748 .geniv = true, 2749 }, 2750 }, 2751 { 2752 .aead = { 2753 .base = { 2754 .cra_name = "authenc(hmac(sha1),cbc(des))", 2755 .cra_driver_name = "authenc-hmac-sha1-" 2756 "cbc-des-caam", 2757 .cra_blocksize = DES_BLOCK_SIZE, 2758 }, 2759 .setkey = aead_setkey, 2760 .setauthsize = aead_setauthsize, 2761 .encrypt = aead_encrypt, 2762 .decrypt = aead_decrypt, 2763 .ivsize = DES_BLOCK_SIZE, 2764 .maxauthsize = SHA1_DIGEST_SIZE, 2765 }, 2766 .caam = { 2767 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2768 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2769 OP_ALG_AAI_HMAC_PRECOMP, 2770 }, 2771 }, 2772 { 2773 .aead = { 2774 .base = { 2775 .cra_name = "echainiv(authenc(hmac(sha1)," 2776 "cbc(des)))", 2777 .cra_driver_name = "echainiv-authenc-" 2778 "hmac-sha1-cbc-des-caam", 2779 .cra_blocksize = DES_BLOCK_SIZE, 2780 }, 2781 .setkey = aead_setkey, 2782 .setauthsize = aead_setauthsize, 2783 .encrypt = aead_encrypt, 2784 .decrypt = aead_decrypt, 2785 .ivsize = DES_BLOCK_SIZE, 2786 .maxauthsize = SHA1_DIGEST_SIZE, 2787 }, 2788 .caam = { 2789 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2790 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 2791 OP_ALG_AAI_HMAC_PRECOMP, 2792 .geniv = true, 2793 }, 2794 }, 2795 { 2796 .aead = { 2797 .base = { 2798 .cra_name = "authenc(hmac(sha224),cbc(des))", 2799 .cra_driver_name = "authenc-hmac-sha224-" 2800 "cbc-des-caam", 2801 .cra_blocksize = DES_BLOCK_SIZE, 2802 }, 2803 .setkey = aead_setkey, 2804 .setauthsize = aead_setauthsize, 2805 .encrypt = aead_encrypt, 2806 .decrypt = aead_decrypt, 2807 .ivsize = DES_BLOCK_SIZE, 2808 .maxauthsize = SHA224_DIGEST_SIZE, 2809 }, 2810 .caam = { 2811 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2812 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2813 OP_ALG_AAI_HMAC_PRECOMP, 2814 }, 2815 }, 2816 { 2817 .aead = { 2818 .base = { 2819 .cra_name = "echainiv(authenc(hmac(sha224)," 2820 "cbc(des)))", 2821 .cra_driver_name = "echainiv-authenc-" 2822 "hmac-sha224-cbc-des-caam", 2823 .cra_blocksize = DES_BLOCK_SIZE, 2824 }, 2825 .setkey = aead_setkey, 2826 .setauthsize = aead_setauthsize, 2827 .encrypt = aead_encrypt, 2828 .decrypt = aead_decrypt, 2829 .ivsize = DES_BLOCK_SIZE, 2830 .maxauthsize = SHA224_DIGEST_SIZE, 2831 }, 2832 .caam = { 2833 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2834 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 2835 OP_ALG_AAI_HMAC_PRECOMP, 2836 .geniv = true, 2837 }, 2838 }, 2839 { 2840 .aead = { 2841 .base = { 2842 .cra_name = "authenc(hmac(sha256),cbc(des))", 2843 .cra_driver_name = "authenc-hmac-sha256-" 2844 "cbc-des-caam", 2845 .cra_blocksize = DES_BLOCK_SIZE, 2846 }, 2847 .setkey = aead_setkey, 2848 .setauthsize = aead_setauthsize, 2849 .encrypt = aead_encrypt, 2850 .decrypt = aead_decrypt, 2851 .ivsize = DES_BLOCK_SIZE, 2852 .maxauthsize = SHA256_DIGEST_SIZE, 2853 }, 2854 .caam = { 2855 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2856 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2857 OP_ALG_AAI_HMAC_PRECOMP, 2858 }, 2859 }, 2860 { 2861 .aead = { 2862 .base = { 2863 .cra_name = "echainiv(authenc(hmac(sha256)," 2864 "cbc(des)))", 2865 .cra_driver_name = "echainiv-authenc-" 2866 "hmac-sha256-cbc-des-caam", 2867 .cra_blocksize = DES_BLOCK_SIZE, 2868 }, 2869 .setkey = aead_setkey, 2870 .setauthsize = aead_setauthsize, 2871 .encrypt = aead_encrypt, 2872 .decrypt = aead_decrypt, 2873 .ivsize = DES_BLOCK_SIZE, 2874 .maxauthsize = SHA256_DIGEST_SIZE, 2875 }, 2876 .caam = { 2877 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2878 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 2879 OP_ALG_AAI_HMAC_PRECOMP, 2880 .geniv = true, 2881 }, 2882 }, 2883 { 2884 .aead = { 2885 .base = { 2886 .cra_name = "authenc(hmac(sha384),cbc(des))", 2887 .cra_driver_name = "authenc-hmac-sha384-" 2888 "cbc-des-caam", 2889 .cra_blocksize = DES_BLOCK_SIZE, 2890 }, 2891 .setkey = aead_setkey, 2892 .setauthsize = aead_setauthsize, 2893 .encrypt = aead_encrypt, 2894 .decrypt = aead_decrypt, 2895 .ivsize = DES_BLOCK_SIZE, 2896 .maxauthsize = SHA384_DIGEST_SIZE, 2897 }, 2898 .caam = { 2899 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2900 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2901 OP_ALG_AAI_HMAC_PRECOMP, 2902 }, 2903 }, 2904 { 2905 .aead = { 2906 .base = { 2907 .cra_name = "echainiv(authenc(hmac(sha384)," 2908 "cbc(des)))", 2909 .cra_driver_name = "echainiv-authenc-" 2910 "hmac-sha384-cbc-des-caam", 2911 .cra_blocksize = DES_BLOCK_SIZE, 2912 }, 2913 .setkey = aead_setkey, 2914 .setauthsize = aead_setauthsize, 2915 .encrypt = aead_encrypt, 2916 .decrypt = aead_decrypt, 2917 .ivsize = DES_BLOCK_SIZE, 2918 .maxauthsize = SHA384_DIGEST_SIZE, 2919 }, 2920 .caam = { 2921 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2922 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 2923 OP_ALG_AAI_HMAC_PRECOMP, 2924 .geniv = true, 2925 }, 2926 }, 2927 { 2928 .aead = { 2929 .base = { 2930 .cra_name = "authenc(hmac(sha512),cbc(des))", 2931 .cra_driver_name = "authenc-hmac-sha512-" 2932 "cbc-des-caam", 2933 .cra_blocksize = DES_BLOCK_SIZE, 2934 }, 2935 .setkey = aead_setkey, 2936 .setauthsize = aead_setauthsize, 2937 .encrypt = aead_encrypt, 2938 .decrypt = aead_decrypt, 2939 .ivsize = DES_BLOCK_SIZE, 2940 .maxauthsize = SHA512_DIGEST_SIZE, 2941 }, 2942 .caam = { 2943 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2944 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2945 OP_ALG_AAI_HMAC_PRECOMP, 2946 }, 2947 }, 2948 { 2949 .aead = { 2950 .base = { 2951 .cra_name = "echainiv(authenc(hmac(sha512)," 2952 "cbc(des)))", 2953 .cra_driver_name = "echainiv-authenc-" 2954 "hmac-sha512-cbc-des-caam", 2955 .cra_blocksize = DES_BLOCK_SIZE, 2956 }, 2957 .setkey = aead_setkey, 2958 .setauthsize = aead_setauthsize, 2959 .encrypt = aead_encrypt, 2960 .decrypt = aead_decrypt, 2961 .ivsize = DES_BLOCK_SIZE, 2962 .maxauthsize = SHA512_DIGEST_SIZE, 2963 }, 2964 .caam = { 2965 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC, 2966 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 2967 OP_ALG_AAI_HMAC_PRECOMP, 2968 .geniv = true, 2969 }, 2970 }, 2971 { 2972 .aead = { 2973 .base = { 2974 .cra_name = "authenc(hmac(md5)," 2975 "rfc3686(ctr(aes)))", 2976 .cra_driver_name = "authenc-hmac-md5-" 2977 "rfc3686-ctr-aes-caam", 2978 .cra_blocksize = 1, 2979 }, 2980 .setkey = aead_setkey, 2981 .setauthsize = aead_setauthsize, 2982 .encrypt = aead_encrypt, 2983 .decrypt = aead_decrypt, 2984 .ivsize = CTR_RFC3686_IV_SIZE, 2985 .maxauthsize = MD5_DIGEST_SIZE, 2986 }, 2987 .caam = { 2988 .class1_alg_type = OP_ALG_ALGSEL_AES | 2989 OP_ALG_AAI_CTR_MOD128, 2990 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 2991 OP_ALG_AAI_HMAC_PRECOMP, 2992 .rfc3686 = true, 2993 }, 2994 }, 2995 { 2996 .aead = { 2997 .base = { 2998 .cra_name = "seqiv(authenc(" 2999 "hmac(md5),rfc3686(ctr(aes))))", 3000 .cra_driver_name = "seqiv-authenc-hmac-md5-" 3001 "rfc3686-ctr-aes-caam", 3002 .cra_blocksize = 1, 3003 }, 3004 .setkey = aead_setkey, 3005 .setauthsize = aead_setauthsize, 3006 .encrypt = aead_encrypt, 3007 .decrypt = aead_decrypt, 3008 .ivsize = CTR_RFC3686_IV_SIZE, 3009 .maxauthsize = MD5_DIGEST_SIZE, 3010 }, 3011 .caam = { 3012 .class1_alg_type = OP_ALG_ALGSEL_AES | 3013 OP_ALG_AAI_CTR_MOD128, 3014 .class2_alg_type = OP_ALG_ALGSEL_MD5 | 3015 OP_ALG_AAI_HMAC_PRECOMP, 3016 .rfc3686 = true, 3017 .geniv = true, 3018 }, 3019 }, 3020 { 3021 .aead = { 3022 .base = { 3023 .cra_name = "authenc(hmac(sha1)," 3024 "rfc3686(ctr(aes)))", 3025 .cra_driver_name = "authenc-hmac-sha1-" 3026 "rfc3686-ctr-aes-caam", 3027 .cra_blocksize = 1, 3028 }, 3029 .setkey = aead_setkey, 3030 .setauthsize = aead_setauthsize, 3031 .encrypt = aead_encrypt, 3032 .decrypt = aead_decrypt, 3033 .ivsize = CTR_RFC3686_IV_SIZE, 3034 .maxauthsize = SHA1_DIGEST_SIZE, 3035 }, 3036 .caam = { 3037 .class1_alg_type = OP_ALG_ALGSEL_AES | 3038 OP_ALG_AAI_CTR_MOD128, 3039 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3040 OP_ALG_AAI_HMAC_PRECOMP, 3041 .rfc3686 = true, 3042 }, 3043 }, 3044 { 3045 .aead = { 3046 .base = { 3047 .cra_name = "seqiv(authenc(" 3048 "hmac(sha1),rfc3686(ctr(aes))))", 3049 .cra_driver_name = "seqiv-authenc-hmac-sha1-" 3050 "rfc3686-ctr-aes-caam", 3051 .cra_blocksize = 1, 3052 }, 3053 .setkey = aead_setkey, 3054 .setauthsize = aead_setauthsize, 3055 .encrypt = aead_encrypt, 3056 .decrypt = aead_decrypt, 3057 .ivsize = CTR_RFC3686_IV_SIZE, 3058 .maxauthsize = SHA1_DIGEST_SIZE, 3059 }, 3060 .caam = { 3061 .class1_alg_type = OP_ALG_ALGSEL_AES | 3062 OP_ALG_AAI_CTR_MOD128, 3063 .class2_alg_type = OP_ALG_ALGSEL_SHA1 | 3064 OP_ALG_AAI_HMAC_PRECOMP, 3065 .rfc3686 = true, 3066 .geniv = true, 3067 }, 3068 }, 3069 { 3070 .aead = { 3071 .base = { 3072 .cra_name = "authenc(hmac(sha224)," 3073 "rfc3686(ctr(aes)))", 3074 .cra_driver_name = "authenc-hmac-sha224-" 3075 "rfc3686-ctr-aes-caam", 3076 .cra_blocksize = 1, 3077 }, 3078 .setkey = aead_setkey, 3079 .setauthsize = aead_setauthsize, 3080 .encrypt = aead_encrypt, 3081 .decrypt = aead_decrypt, 3082 .ivsize = CTR_RFC3686_IV_SIZE, 3083 .maxauthsize = SHA224_DIGEST_SIZE, 3084 }, 3085 .caam = { 3086 .class1_alg_type = OP_ALG_ALGSEL_AES | 3087 OP_ALG_AAI_CTR_MOD128, 3088 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3089 OP_ALG_AAI_HMAC_PRECOMP, 3090 .rfc3686 = true, 3091 }, 3092 }, 3093 { 3094 .aead = { 3095 .base = { 3096 .cra_name = "seqiv(authenc(" 3097 "hmac(sha224),rfc3686(ctr(aes))))", 3098 .cra_driver_name = "seqiv-authenc-hmac-sha224-" 3099 "rfc3686-ctr-aes-caam", 3100 .cra_blocksize = 1, 3101 }, 3102 .setkey = aead_setkey, 3103 .setauthsize = aead_setauthsize, 3104 .encrypt = aead_encrypt, 3105 .decrypt = aead_decrypt, 3106 .ivsize = CTR_RFC3686_IV_SIZE, 3107 .maxauthsize = SHA224_DIGEST_SIZE, 3108 }, 3109 .caam = { 3110 .class1_alg_type = OP_ALG_ALGSEL_AES | 3111 OP_ALG_AAI_CTR_MOD128, 3112 .class2_alg_type = OP_ALG_ALGSEL_SHA224 | 3113 OP_ALG_AAI_HMAC_PRECOMP, 3114 .rfc3686 = true, 3115 .geniv = true, 3116 }, 3117 }, 3118 { 3119 .aead = { 3120 .base = { 3121 .cra_name = "authenc(hmac(sha256)," 3122 "rfc3686(ctr(aes)))", 3123 .cra_driver_name = "authenc-hmac-sha256-" 3124 "rfc3686-ctr-aes-caam", 3125 .cra_blocksize = 1, 3126 }, 3127 .setkey = aead_setkey, 3128 .setauthsize = aead_setauthsize, 3129 .encrypt = aead_encrypt, 3130 .decrypt = aead_decrypt, 3131 .ivsize = CTR_RFC3686_IV_SIZE, 3132 .maxauthsize = SHA256_DIGEST_SIZE, 3133 }, 3134 .caam = { 3135 .class1_alg_type = OP_ALG_ALGSEL_AES | 3136 OP_ALG_AAI_CTR_MOD128, 3137 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3138 OP_ALG_AAI_HMAC_PRECOMP, 3139 .rfc3686 = true, 3140 }, 3141 }, 3142 { 3143 .aead = { 3144 .base = { 3145 .cra_name = "seqiv(authenc(hmac(sha256)," 3146 "rfc3686(ctr(aes))))", 3147 .cra_driver_name = "seqiv-authenc-hmac-sha256-" 3148 "rfc3686-ctr-aes-caam", 3149 .cra_blocksize = 1, 3150 }, 3151 .setkey = aead_setkey, 3152 .setauthsize = aead_setauthsize, 3153 .encrypt = aead_encrypt, 3154 .decrypt = aead_decrypt, 3155 .ivsize = CTR_RFC3686_IV_SIZE, 3156 .maxauthsize = SHA256_DIGEST_SIZE, 3157 }, 3158 .caam = { 3159 .class1_alg_type = OP_ALG_ALGSEL_AES | 3160 OP_ALG_AAI_CTR_MOD128, 3161 .class2_alg_type = OP_ALG_ALGSEL_SHA256 | 3162 OP_ALG_AAI_HMAC_PRECOMP, 3163 .rfc3686 = true, 3164 .geniv = true, 3165 }, 3166 }, 3167 { 3168 .aead = { 3169 .base = { 3170 .cra_name = "authenc(hmac(sha384)," 3171 "rfc3686(ctr(aes)))", 3172 .cra_driver_name = "authenc-hmac-sha384-" 3173 "rfc3686-ctr-aes-caam", 3174 .cra_blocksize = 1, 3175 }, 3176 .setkey = aead_setkey, 3177 .setauthsize = aead_setauthsize, 3178 .encrypt = aead_encrypt, 3179 .decrypt = aead_decrypt, 3180 .ivsize = CTR_RFC3686_IV_SIZE, 3181 .maxauthsize = SHA384_DIGEST_SIZE, 3182 }, 3183 .caam = { 3184 .class1_alg_type = OP_ALG_ALGSEL_AES | 3185 OP_ALG_AAI_CTR_MOD128, 3186 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3187 OP_ALG_AAI_HMAC_PRECOMP, 3188 .rfc3686 = true, 3189 }, 3190 }, 3191 { 3192 .aead = { 3193 .base = { 3194 .cra_name = "seqiv(authenc(hmac(sha384)," 3195 "rfc3686(ctr(aes))))", 3196 .cra_driver_name = "seqiv-authenc-hmac-sha384-" 3197 "rfc3686-ctr-aes-caam", 3198 .cra_blocksize = 1, 3199 }, 3200 .setkey = aead_setkey, 3201 .setauthsize = aead_setauthsize, 3202 .encrypt = aead_encrypt, 3203 .decrypt = aead_decrypt, 3204 .ivsize = CTR_RFC3686_IV_SIZE, 3205 .maxauthsize = SHA384_DIGEST_SIZE, 3206 }, 3207 .caam = { 3208 .class1_alg_type = OP_ALG_ALGSEL_AES | 3209 OP_ALG_AAI_CTR_MOD128, 3210 .class2_alg_type = OP_ALG_ALGSEL_SHA384 | 3211 OP_ALG_AAI_HMAC_PRECOMP, 3212 .rfc3686 = true, 3213 .geniv = true, 3214 }, 3215 }, 3216 { 3217 .aead = { 3218 .base = { 3219 .cra_name = "authenc(hmac(sha512)," 3220 "rfc3686(ctr(aes)))", 3221 .cra_driver_name = "authenc-hmac-sha512-" 3222 "rfc3686-ctr-aes-caam", 3223 .cra_blocksize = 1, 3224 }, 3225 .setkey = aead_setkey, 3226 .setauthsize = aead_setauthsize, 3227 .encrypt = aead_encrypt, 3228 .decrypt = aead_decrypt, 3229 .ivsize = CTR_RFC3686_IV_SIZE, 3230 .maxauthsize = SHA512_DIGEST_SIZE, 3231 }, 3232 .caam = { 3233 .class1_alg_type = OP_ALG_ALGSEL_AES | 3234 OP_ALG_AAI_CTR_MOD128, 3235 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3236 OP_ALG_AAI_HMAC_PRECOMP, 3237 .rfc3686 = true, 3238 }, 3239 }, 3240 { 3241 .aead = { 3242 .base = { 3243 .cra_name = "seqiv(authenc(hmac(sha512)," 3244 "rfc3686(ctr(aes))))", 3245 .cra_driver_name = "seqiv-authenc-hmac-sha512-" 3246 "rfc3686-ctr-aes-caam", 3247 .cra_blocksize = 1, 3248 }, 3249 .setkey = aead_setkey, 3250 .setauthsize = aead_setauthsize, 3251 .encrypt = aead_encrypt, 3252 .decrypt = aead_decrypt, 3253 .ivsize = CTR_RFC3686_IV_SIZE, 3254 .maxauthsize = SHA512_DIGEST_SIZE, 3255 }, 3256 .caam = { 3257 .class1_alg_type = OP_ALG_ALGSEL_AES | 3258 OP_ALG_AAI_CTR_MOD128, 3259 .class2_alg_type = OP_ALG_ALGSEL_SHA512 | 3260 OP_ALG_AAI_HMAC_PRECOMP, 3261 .rfc3686 = true, 3262 .geniv = true, 3263 }, 3264 }, 3265 }; 3266 3267 struct caam_crypto_alg { 3268 struct crypto_alg crypto_alg; 3269 struct list_head entry; 3270 struct caam_alg_entry caam; 3271 }; 3272 3273 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam, 3274 bool uses_dkp) 3275 { 3276 dma_addr_t dma_addr; 3277 struct caam_drv_private *priv; 3278 3279 ctx->jrdev = caam_jr_alloc(); 3280 if (IS_ERR(ctx->jrdev)) { 3281 pr_err("Job Ring Device allocation for transform failed\n"); 3282 return PTR_ERR(ctx->jrdev); 3283 } 3284 3285 priv = dev_get_drvdata(ctx->jrdev->parent); 3286 if (priv->era >= 6 && uses_dkp) 3287 ctx->dir = DMA_BIDIRECTIONAL; 3288 else 3289 ctx->dir = DMA_TO_DEVICE; 3290 3291 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, 3292 offsetof(struct caam_ctx, 3293 sh_desc_enc_dma), 3294 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3295 if (dma_mapping_error(ctx->jrdev, dma_addr)) { 3296 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); 3297 caam_jr_free(ctx->jrdev); 3298 return -ENOMEM; 3299 } 3300 3301 ctx->sh_desc_enc_dma = dma_addr; 3302 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx, 3303 sh_desc_dec); 3304 ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx, 3305 sh_desc_givenc); 3306 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key); 3307 3308 /* copy descriptor header template value */ 3309 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type; 3310 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type; 3311 3312 return 0; 3313 } 3314 3315 static int caam_cra_init(struct crypto_tfm *tfm) 3316 { 3317 struct crypto_alg *alg = tfm->__crt_alg; 3318 struct caam_crypto_alg *caam_alg = 3319 container_of(alg, struct caam_crypto_alg, crypto_alg); 3320 struct caam_ctx *ctx = crypto_tfm_ctx(tfm); 3321 3322 return caam_init_common(ctx, &caam_alg->caam, false); 3323 } 3324 3325 static int caam_aead_init(struct crypto_aead *tfm) 3326 { 3327 struct aead_alg *alg = crypto_aead_alg(tfm); 3328 struct caam_aead_alg *caam_alg = 3329 container_of(alg, struct caam_aead_alg, aead); 3330 struct caam_ctx *ctx = crypto_aead_ctx(tfm); 3331 3332 return caam_init_common(ctx, &caam_alg->caam, 3333 alg->setkey == aead_setkey); 3334 } 3335 3336 static void caam_exit_common(struct caam_ctx *ctx) 3337 { 3338 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, 3339 offsetof(struct caam_ctx, sh_desc_enc_dma), 3340 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC); 3341 caam_jr_free(ctx->jrdev); 3342 } 3343 3344 static void caam_cra_exit(struct crypto_tfm *tfm) 3345 { 3346 caam_exit_common(crypto_tfm_ctx(tfm)); 3347 } 3348 3349 static void caam_aead_exit(struct crypto_aead *tfm) 3350 { 3351 caam_exit_common(crypto_aead_ctx(tfm)); 3352 } 3353 3354 static void __exit caam_algapi_exit(void) 3355 { 3356 3357 struct caam_crypto_alg *t_alg, *n; 3358 int i; 3359 3360 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3361 struct caam_aead_alg *t_alg = driver_aeads + i; 3362 3363 if (t_alg->registered) 3364 crypto_unregister_aead(&t_alg->aead); 3365 } 3366 3367 if (!alg_list.next) 3368 return; 3369 3370 list_for_each_entry_safe(t_alg, n, &alg_list, entry) { 3371 crypto_unregister_alg(&t_alg->crypto_alg); 3372 list_del(&t_alg->entry); 3373 kfree(t_alg); 3374 } 3375 } 3376 3377 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template 3378 *template) 3379 { 3380 struct caam_crypto_alg *t_alg; 3381 struct crypto_alg *alg; 3382 3383 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL); 3384 if (!t_alg) { 3385 pr_err("failed to allocate t_alg\n"); 3386 return ERR_PTR(-ENOMEM); 3387 } 3388 3389 alg = &t_alg->crypto_alg; 3390 3391 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name); 3392 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s", 3393 template->driver_name); 3394 alg->cra_module = THIS_MODULE; 3395 alg->cra_init = caam_cra_init; 3396 alg->cra_exit = caam_cra_exit; 3397 alg->cra_priority = CAAM_CRA_PRIORITY; 3398 alg->cra_blocksize = template->blocksize; 3399 alg->cra_alignmask = 0; 3400 alg->cra_ctxsize = sizeof(struct caam_ctx); 3401 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY | 3402 template->type; 3403 switch (template->type) { 3404 case CRYPTO_ALG_TYPE_GIVCIPHER: 3405 alg->cra_type = &crypto_givcipher_type; 3406 alg->cra_ablkcipher = template->template_ablkcipher; 3407 break; 3408 case CRYPTO_ALG_TYPE_ABLKCIPHER: 3409 alg->cra_type = &crypto_ablkcipher_type; 3410 alg->cra_ablkcipher = template->template_ablkcipher; 3411 break; 3412 } 3413 3414 t_alg->caam.class1_alg_type = template->class1_alg_type; 3415 t_alg->caam.class2_alg_type = template->class2_alg_type; 3416 3417 return t_alg; 3418 } 3419 3420 static void caam_aead_alg_init(struct caam_aead_alg *t_alg) 3421 { 3422 struct aead_alg *alg = &t_alg->aead; 3423 3424 alg->base.cra_module = THIS_MODULE; 3425 alg->base.cra_priority = CAAM_CRA_PRIORITY; 3426 alg->base.cra_ctxsize = sizeof(struct caam_ctx); 3427 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY; 3428 3429 alg->init = caam_aead_init; 3430 alg->exit = caam_aead_exit; 3431 } 3432 3433 static int __init caam_algapi_init(void) 3434 { 3435 struct device_node *dev_node; 3436 struct platform_device *pdev; 3437 struct device *ctrldev; 3438 struct caam_drv_private *priv; 3439 int i = 0, err = 0; 3440 u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst; 3441 unsigned int md_limit = SHA512_DIGEST_SIZE; 3442 bool registered = false; 3443 3444 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0"); 3445 if (!dev_node) { 3446 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0"); 3447 if (!dev_node) 3448 return -ENODEV; 3449 } 3450 3451 pdev = of_find_device_by_node(dev_node); 3452 if (!pdev) { 3453 of_node_put(dev_node); 3454 return -ENODEV; 3455 } 3456 3457 ctrldev = &pdev->dev; 3458 priv = dev_get_drvdata(ctrldev); 3459 of_node_put(dev_node); 3460 3461 /* 3462 * If priv is NULL, it's probably because the caam driver wasn't 3463 * properly initialized (e.g. RNG4 init failed). Thus, bail out here. 3464 */ 3465 if (!priv) 3466 return -ENODEV; 3467 3468 3469 INIT_LIST_HEAD(&alg_list); 3470 3471 /* 3472 * Register crypto algorithms the device supports. 3473 * First, detect presence and attributes of DES, AES, and MD blocks. 3474 */ 3475 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls); 3476 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls); 3477 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT; 3478 aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT; 3479 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT; 3480 3481 /* If MD is present, limit digest size based on LP256 */ 3482 if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256)) 3483 md_limit = SHA256_DIGEST_SIZE; 3484 3485 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) { 3486 struct caam_crypto_alg *t_alg; 3487 struct caam_alg_template *alg = driver_algs + i; 3488 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK; 3489 3490 /* Skip DES algorithms if not supported by device */ 3491 if (!des_inst && 3492 ((alg_sel == OP_ALG_ALGSEL_3DES) || 3493 (alg_sel == OP_ALG_ALGSEL_DES))) 3494 continue; 3495 3496 /* Skip AES algorithms if not supported by device */ 3497 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES)) 3498 continue; 3499 3500 /* 3501 * Check support for AES modes not available 3502 * on LP devices. 3503 */ 3504 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) 3505 if ((alg->class1_alg_type & OP_ALG_AAI_MASK) == 3506 OP_ALG_AAI_XTS) 3507 continue; 3508 3509 t_alg = caam_alg_alloc(alg); 3510 if (IS_ERR(t_alg)) { 3511 err = PTR_ERR(t_alg); 3512 pr_warn("%s alg allocation failed\n", alg->driver_name); 3513 continue; 3514 } 3515 3516 err = crypto_register_alg(&t_alg->crypto_alg); 3517 if (err) { 3518 pr_warn("%s alg registration failed\n", 3519 t_alg->crypto_alg.cra_driver_name); 3520 kfree(t_alg); 3521 continue; 3522 } 3523 3524 list_add_tail(&t_alg->entry, &alg_list); 3525 registered = true; 3526 } 3527 3528 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) { 3529 struct caam_aead_alg *t_alg = driver_aeads + i; 3530 u32 c1_alg_sel = t_alg->caam.class1_alg_type & 3531 OP_ALG_ALGSEL_MASK; 3532 u32 c2_alg_sel = t_alg->caam.class2_alg_type & 3533 OP_ALG_ALGSEL_MASK; 3534 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK; 3535 3536 /* Skip DES algorithms if not supported by device */ 3537 if (!des_inst && 3538 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) || 3539 (c1_alg_sel == OP_ALG_ALGSEL_DES))) 3540 continue; 3541 3542 /* Skip AES algorithms if not supported by device */ 3543 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES)) 3544 continue; 3545 3546 /* 3547 * Check support for AES algorithms not available 3548 * on LP devices. 3549 */ 3550 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP) 3551 if (alg_aai == OP_ALG_AAI_GCM) 3552 continue; 3553 3554 /* 3555 * Skip algorithms requiring message digests 3556 * if MD or MD size is not supported by device. 3557 */ 3558 if (c2_alg_sel && 3559 (!md_inst || (t_alg->aead.maxauthsize > md_limit))) 3560 continue; 3561 3562 caam_aead_alg_init(t_alg); 3563 3564 err = crypto_register_aead(&t_alg->aead); 3565 if (err) { 3566 pr_warn("%s alg registration failed\n", 3567 t_alg->aead.base.cra_driver_name); 3568 continue; 3569 } 3570 3571 t_alg->registered = true; 3572 registered = true; 3573 } 3574 3575 if (registered) 3576 pr_info("caam algorithms registered in /proc/crypto\n"); 3577 3578 return err; 3579 } 3580 3581 module_init(caam_algapi_init); 3582 module_exit(caam_algapi_exit); 3583 3584 MODULE_LICENSE("GPL"); 3585 MODULE_DESCRIPTION("FSL CAAM support for crypto API"); 3586 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC"); 3587