// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Software async crypto daemon.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Added AEAD support to cryptd.
 *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#include <crypto/internal/hash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/cryptd.h>
#include <linux/refcount.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

static struct workqueue_struct *cryptd_wq;

struct cryptd_cpu_queue {
	struct crypto_queue queue;
	struct work_struct work;
};

struct cryptd_queue {
	/*
	 * Protected by disabling BH to allow enqueueing from softirq context
	 * and dequeuing from kworker (cryptd_queue_worker()).
	 */
	struct cryptd_cpu_queue __percpu *cpu_queue;
};

struct cryptd_instance_ctx {
	struct crypto_spawn spawn;
	struct cryptd_queue *queue;
};

struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	struct cryptd_queue *queue;
};

struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;
	struct cryptd_queue *queue;
};

struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;
	struct cryptd_queue *queue;
};

struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_skcipher *child;
};

struct cryptd_skcipher_request_ctx {
	struct skcipher_request req;
};

struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;
};

struct cryptd_hash_request_ctx {
	crypto_completion_t complete;
	void *data;
	struct shash_desc desc;
};

struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;
};

struct cryptd_aead_request_ctx {
	struct aead_request req;
};

static void cryptd_queue_worker(struct work_struct *work);

static int cryptd_init_queue(struct cryptd_queue *queue,
			     unsigned int max_cpu_qlen)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
	if (!queue->cpu_queue)
		return -ENOMEM;
	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
	}
	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
	return 0;
}

static void cryptd_fini_queue(struct cryptd_queue *queue)
{
	int cpu;
	struct cryptd_cpu_queue *cpu_queue;

	for_each_possible_cpu(cpu) {
		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
		BUG_ON(cpu_queue->queue.qlen);
	}
	free_percpu(queue->cpu_queue);
}

static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	local_bh_disable();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out;

	queue_work_on(smp_processor_id(), cryptd_wq, &cpu_queue->work);

	if (!refcount_read(refcnt))
		goto out;

	refcount_inc(refcnt);

out:
	local_bh_enable();

	return err;
}
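/*
 * Note on the reference taken above: the refcount at the start of the tfm
 * context only becomes live (non-zero) for tfms handed out by
 * cryptd_alloc_skcipher/ahash/aead(), which initialise it to 1.  Each request
 * queued on such a tfm takes an extra reference, dropped again in the
 * matching completion path, so cryptd_free_*() may safely be called while
 * requests are still queued.
 */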
/*
 * Called in workqueue context: do one unit of real crypto work (via the
 * request's completion callback) and reschedule itself if there is more
 * work to do.
 */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 */
	local_bh_disable();
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	local_bh_enable();

	if (!req)
		return;

	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);
	crypto_request_complete(req, 0);

	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}

static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
	return ictx->queue;
}

static void cryptd_type_and_mask(struct crypto_attr_type *algt,
				 u32 *type, u32 *mask)
{
	/*
	 * cryptd is allowed to wrap internal algorithms, but in that case the
	 * resulting cryptd instance will be marked as internal as well.
	 */
	*type = algt->type & CRYPTO_ALG_INTERNAL;
	*mask = algt->mask & CRYPTO_ALG_INTERNAL;

	/* No point in cryptd wrapping an algorithm that's already async. */
	*mask |= CRYPTO_ALG_ASYNC;

	*mask |= crypto_algt_inherited_mask(algt);
}

static int cryptd_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)",
		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 50;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}
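/*
 * Illustrative example: wrapping the generic SHA-256 shash produces an
 * instance with cra_name "sha256" and cra_driver_name
 * "cryptd(sha256-generic)", advertised at the wrapped algorithm's priority
 * plus 50.
 */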
static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
				  const u8 *key, unsigned int keylen)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child,
				  crypto_skcipher_get_flags(parent) &
				  CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}

static struct skcipher_request *cryptd_skcipher_prepare(
	struct skcipher_request *req, int err)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->req;
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *child;

	req->base.complete = subreq->base.complete;
	req->base.data = subreq->base.data;

	if (unlikely(err == -EINPROGRESS))
		return NULL;

	ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	child = ctx->child;

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				      NULL, NULL);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);

	return subreq;
}

static void cryptd_skcipher_complete(struct skcipher_request *req, int err,
				     crypto_completion_t complete)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->req;
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	skcipher_request_complete(req, err);
	local_bh_enable();

	if (unlikely(err == -EINPROGRESS)) {
		subreq->base.complete = req->base.complete;
		subreq->base.data = req->base.data;
		req->base.complete = complete;
		req->base.data = req;
	} else if (refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}

static void cryptd_skcipher_encrypt(void *data, int err)
{
	struct skcipher_request *req = data;
	struct skcipher_request *subreq;

	subreq = cryptd_skcipher_prepare(req, err);
	if (likely(subreq))
		err = crypto_skcipher_encrypt(subreq);

	cryptd_skcipher_complete(req, err, cryptd_skcipher_encrypt);
}

static void cryptd_skcipher_decrypt(void *data, int err)
{
	struct skcipher_request *req = data;
	struct skcipher_request *subreq;

	subreq = cryptd_skcipher_prepare(req, err);
	if (likely(subreq))
		err = crypto_skcipher_decrypt(subreq);

	cryptd_skcipher_complete(req, err, cryptd_skcipher_decrypt);
}
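/*
 * While a request sits on the cryptd queue, its original completion callback
 * and data are parked in the embedded subrequest and req->base.complete is
 * repointed at cryptd's own handler (see cryptd_skcipher_enqueue() below);
 * cryptd_skcipher_prepare() restores them before the child cipher runs, and
 * cryptd_skcipher_complete() re-parks them when it only has a backlog
 * -EINPROGRESS notification to forward.
 */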
static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_request *subreq = &rctx->req;
	struct cryptd_queue *queue;

	queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));
	subreq->base.complete = req->base.complete;
	subreq->base.data = req->base.data;
	req->base.complete = compl;
	req->base.data = req;

	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
}

static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
{
	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
}

static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_skcipher_set_reqsize(
		tfm, sizeof(struct cryptd_skcipher_request_ctx) +
		     crypto_skcipher_reqsize(cipher));
	return 0;
}

static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void cryptd_skcipher_free(struct skcipher_instance *inst)
{
	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct crypto_attr_type *algt,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_skcipher_free(inst);
	}
	return err;
}
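/*
 * The instance's cra_ctxsize only covers the small cryptd context (refcount
 * plus child pointer); the child tfm itself is allocated in
 * cryptd_skcipher_init_tfm(), which also reserves room in every request for
 * the child's own skcipher_request via crypto_skcipher_set_reqsize().
 */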
static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_shash_spawn *spawn = &ictx->spawn;
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *hash;

	hash = crypto_spawn_shash(spawn);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	ctx->child = hash;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct cryptd_hash_request_ctx) +
				 crypto_shash_descsize(hash));
	return 0;
}

static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(ctx->child);
}

static int cryptd_hash_setkey(struct crypto_ahash *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(parent);
	struct crypto_shash *child = ctx->child;

	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
				      CRYPTO_TFM_REQ_MASK);
	return crypto_shash_setkey(child, key, keylen);
}

static int cryptd_hash_enqueue(struct ahash_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue =
		cryptd_get_queue(crypto_ahash_tfm(tfm));

	rctx->complete = req->base.complete;
	rctx->data = req->base.data;
	req->base.complete = compl;
	req->base.data = req;

	return cryptd_enqueue_request(queue, &req->base);
}

static struct shash_desc *cryptd_hash_prepare(struct ahash_request *req,
					      int err)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	req->base.complete = rctx->complete;
	req->base.data = rctx->data;

	if (unlikely(err == -EINPROGRESS))
		return NULL;

	return &rctx->desc;
}

static void cryptd_hash_complete(struct ahash_request *req, int err,
				 crypto_completion_t complete)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	ahash_request_complete(req, err);
	local_bh_enable();

	if (err == -EINPROGRESS) {
		req->base.complete = complete;
		req->base.data = req;
	} else if (refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}

static void cryptd_hash_init(void *data, int err)
{
	struct ahash_request *req = data;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_shash *child = ctx->child;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (unlikely(!desc))
		goto out;

	desc->tfm = child;

	err = crypto_shash_init(desc);

out:
	cryptd_hash_complete(req, err, cryptd_hash_init);
}

static int cryptd_hash_init_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_init);
}

static void cryptd_hash_update(void *data, int err)
{
	struct ahash_request *req = data;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (likely(desc))
		err = shash_ahash_update(req, desc);

	cryptd_hash_complete(req, err, cryptd_hash_update);
}
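/*
 * The shash_desc holding the hash state lives in the request context
 * (cryptd_hash_request_ctx), which cryptd_hash_init_tfm() sized to include
 * the child's descsize, so separate init/update/final submissions on the
 * same ahash_request keep operating on the same descriptor.
 */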
static int cryptd_hash_update_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_update);
}

static void cryptd_hash_final(void *data, int err)
{
	struct ahash_request *req = data;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (likely(desc))
		err = crypto_shash_final(desc, req->result);

	cryptd_hash_complete(req, err, cryptd_hash_final);
}

static int cryptd_hash_final_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_final);
}

static void cryptd_hash_finup(void *data, int err)
{
	struct ahash_request *req = data;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (likely(desc))
		err = shash_ahash_finup(req, desc);

	cryptd_hash_complete(req, err, cryptd_hash_finup);
}

static int cryptd_hash_finup_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_finup);
}

static void cryptd_hash_digest(void *data, int err)
{
	struct ahash_request *req = data;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_shash *child = ctx->child;
	struct shash_desc *desc;

	desc = cryptd_hash_prepare(req, err);
	if (unlikely(!desc))
		goto out;

	desc->tfm = child;

	err = shash_ahash_digest(req, desc);

out:
	cryptd_hash_complete(req, err, cryptd_hash_digest);
}

static int cryptd_hash_digest_enqueue(struct ahash_request *req)
{
	return cryptd_hash_enqueue(req, cryptd_hash_digest);
}

static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return crypto_shash_export(&rctx->desc, out);
}

static int cryptd_hash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(req);

	desc->tfm = ctx->child;

	return crypto_shash_import(desc, in);
}
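/*
 * Export and import act directly on the shash_desc embedded in the request,
 * so the exported state has the format and size (statesize) of the wrapped
 * shash algorithm.
 */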
static void cryptd_hash_free(struct ahash_instance *inst)
{
	struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);

	crypto_drop_shash(&ctx->spawn);
	kfree(inst);
}

static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_shash_alg(&ctx->spawn);

	err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL |
					CRYPTO_ALG_OPTIONAL_KEY));
	inst->alg.halg.digestsize = alg->digestsize;
	inst->alg.halg.statesize = alg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final = cryptd_hash_final_enqueue;
	inst->alg.finup = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	if (crypto_shash_alg_has_setkey(alg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	inst->free = cryptd_hash_free;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_hash_free(inst);
	}
	return err;
}

static int cryptd_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setkey(child, key, keylen);
}

static int cryptd_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	return crypto_aead_setauthsize(child, authsize);
}

static void cryptd_aead_crypt(struct aead_request *req,
			      struct crypto_aead *child, int err,
			      int (*crypt)(struct aead_request *req),
			      crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx;
	struct aead_request *subreq;
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	subreq = &rctx->req;
	req->base.complete = subreq->base.complete;
	req->base.data = subreq->base.data;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
				  NULL, NULL);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	err = crypt(subreq);

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	aead_request_complete(req, err);
	local_bh_enable();

	if (err == -EINPROGRESS) {
		subreq->base.complete = req->base.complete;
		subreq->base.data = req->base.data;
		req->base.complete = compl;
		req->base.data = req;
	} else if (refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}

static void cryptd_aead_encrypt(void *data, int err)
{
	struct aead_request *req = data;
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *child;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	child = ctx->child;
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt,
			  cryptd_aead_encrypt);
}

static void cryptd_aead_decrypt(void *data, int err)
{
	struct aead_request *req = data;
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *child;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	child = ctx->child;
	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt,
			  cryptd_aead_decrypt);
}
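/*
 * cryptd_aead_crypt() is the AEAD counterpart of the skcipher prepare/complete
 * pair above: it restores the caller's completion callback, runs the child
 * AEAD on the embedded subrequest, and on a backlog -EINPROGRESS notification
 * re-parks the callback so the real completion can still be delivered later.
 */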
static int cryptd_aead_enqueue(struct aead_request *req,
			       crypto_completion_t compl)
{
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
	struct aead_request *subreq = &rctx->req;

	subreq->base.complete = req->base.complete;
	subreq->base.data = req->base.data;
	req->base.complete = compl;
	req->base.data = req;
	return cryptd_enqueue_request(queue, &req->base);
}

static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_encrypt);
}

static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
{
	return cryptd_aead_enqueue(req, cryptd_aead_decrypt);
}

static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cipher = crypto_spawn_aead(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(
		tfm, sizeof(struct cryptd_aead_request_ctx) +
		     crypto_aead_reqsize(cipher));
	return 0;
}

static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void cryptd_aead_free(struct aead_instance *inst)
{
	struct aead_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead_spawn);
	kfree(inst);
}

static int cryptd_create_aead(struct crypto_template *tmpl,
			      struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	inst->free = cryptd_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		cryptd_aead_free(inst);
	}
	return err;
}

static struct cryptd_queue queue;

static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, algt, &queue);
	}

	return -EINVAL;
}
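/*
 * Illustrative dispatch: requesting "cryptd(cbc(aes))" through the skcipher
 * API takes the skcipher path above, "cryptd(sha256)" through the ahash API
 * the hash path, and "cryptd(gcm(aes))" through the AEAD API the AEAD path.
 */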
static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.module = THIS_MODULE,
};

struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);

struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_child);

bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);

void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);

struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);

struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_child);

struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
{
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	return &rctx->desc;
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);

bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_ahash_queued);

void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
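/*
 * Typical caller pattern (illustrative sketch, not taken from this file):
 * a driver with a synchronous-only implementation wraps it via cryptd and
 * keeps both handles around, e.g.
 *
 *	struct cryptd_ahash *ctfm;
 *
 *	ctfm = cryptd_alloc_ahash("__driver-hash-foo", CRYPTO_ALG_INTERNAL,
 *				  CRYPTO_ALG_INTERNAL);
 *	if (IS_ERR(ctfm))
 *		return PTR_ERR(ctfm);
 *	...
 *	use &ctfm->base for asynchronous requests, or
 *	cryptd_ahash_child(ctfm) directly when it is safe to run synchronously
 *	...
 *	cryptd_free_ahash(ctfm);
 *
 * "__driver-hash-foo" is a made-up driver name used only for illustration.
 */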
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);

struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx;

	ctx = crypto_aead_ctx(&tfm->base);
	return ctx->child;
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);

bool cryptd_aead_queued(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	return refcount_read(&ctx->refcnt) - 1;
}
EXPORT_SYMBOL_GPL(cryptd_aead_queued);

void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);

static int __init cryptd_init(void)
{
	int err;

	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);
err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}

static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}

subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");