/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}
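
/*
 * Illustrative example (not part of the original file; names and
 * priorities are hypothetical): __crypto_alg_lookup() treats a match
 * on cra_driver_name as exact and returns it immediately, while a
 * match on cra_name is fuzzy and the highest cra_priority wins:
 *
 *      cra_name        cra_driver_name         cra_priority
 *      "aes"           "aes-generic"           100
 *      "aes"           "aes-asm"               200  <- chosen for "aes"
 *
 * Since best starts at -2 and larvals register with cra_priority -1,
 * a pending larval can also satisfy a fuzzy match when no finished
 * algorithm is available yet.
 */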

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        atomic_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg) {
                kfree(larval);
                if (crypto_is_larval(alg))
                        alg = crypto_larval_wait(alg);
        }

        return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_killable_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        up_read(&crypto_alg_sem);

        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg) {
                request_module("crypto-%s", name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
                      CRYPTO_ALG_NEED_FALLBACK))
                        request_module("crypto-%s-all", name);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (alg)
                return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

        return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);
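
/*
 * Illustrative example (an assumption, not part of the original
 * file): the request_module() calls above depend on the "crypto-"
 * module alias convention. A module that registers an algorithm
 * typically declares the alias via MODULE_ALIAS_CRYPTO() from
 * <linux/crypto.h> so that a lookup by name can autoload it:
 *
 *      MODULE_ALIAS_CRYPTO("sha256");
 *      MODULE_ALIAS_CRYPTO("sha256-generic");
 *
 * With those in place, request_module("crypto-sha256") resolves to
 * that module through its modalias table.
 */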

int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        if (!((type | mask) & CRYPTO_ALG_TESTED)) {
                type |= CRYPTO_ALG_TESTED;
                mask |= CRYPTO_ALG_TESTED;
        }

        /*
         * If the internal flag is set for a cipher, require the caller
         * to invoke the cipher with the internal flag to use that cipher.
         * Also, if a caller wants to allocate a cipher that may or may
         * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
         * !(mask & CRYPTO_ALG_INTERNAL).
         */
        if (!((type | mask) & CRYPTO_ALG_INTERNAL))
                mask |= CRYPTO_ALG_INTERNAL;

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
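
/*
 * Worked example (not part of the original file) of the type/mask
 * adjustments above. A caller that says nothing about
 * CRYPTO_ALG_TESTED, e.g. type = 0 and mask = 0, ends up with
 *
 *      type |= CRYPTO_ALG_TESTED;
 *      mask |= CRYPTO_ALG_TESTED;
 *
 * so that (q->cra_flags ^ type) & mask in __crypto_alg_lookup() only
 * matches algorithms whose TESTED bit is set. Likewise, a caller that
 * is silent about CRYPTO_ALG_INTERNAL gets the INTERNAL bit set in
 * mask but not in type, which excludes internal-only ciphers. To
 * accept both internal and non-internal implementations, pass
 * type | CRYPTO_ALG_INTERNAL with the mask bit clear, as the comment
 * in crypto_alg_mod_lookup() describes.
 */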

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type && tfm->exit)
                tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/**
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
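
/*
 * Illustrative usage (an assumption, not part of the original file):
 * allocating and releasing a bare transform by name. The algorithm
 * name is hypothetical; errors follow the ERR_PTR convention used
 * throughout this file.
 *
 *      struct crypto_tfm *tfm;
 *
 *      tfm = crypto_alloc_base("sha256", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *
 *      // ... use the transform ...
 *
 *      // mem == tfm for tfms allocated by __crypto_alloc_tfm()
 *      crypto_destroy_tfm(tfm, tfm);
 */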

void *crypto_create_tfm(struct crypto_alg *alg,
                        const struct crypto_type *frontend)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

        mem = kzalloc(total, GFP_KERNEL);
        if (mem == NULL)
                goto out_err;

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;

        err = frontend->init_tfm(tfm);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
                                   const struct crypto_type *frontend,
                                   u32 type, u32 mask)
{
        struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
                crypto_alg_mod_lookup;

        if (frontend) {
                type &= frontend->maskclear;
                mask &= frontend->maskclear;
                type |= frontend->type;
                mask |= frontend->maskset;

                if (frontend->lookup)
                        lookup = frontend->lookup;
        }

        return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/**
 * crypto_alloc_tfm - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
                       const struct crypto_type *frontend, u32 type, u32 mask)
{
        void *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_find_alg(alg_name, frontend, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm(alg, frontend);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

/**
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;

        if (unlikely(!mem))
                return;

        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(struct crypto_async_request *req, int err)
{
        struct crypto_wait *wait = req->data;

        if (err == -EINPROGRESS)
                return;

        wait->err = err;
        complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
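
/*
 * Illustrative example (an assumption, not part of the original
 * file): the usual synchronous-wait pattern built on
 * crypto_req_done(). The request setup is hypothetical; the wait
 * helpers are declared in <linux/crypto.h>.
 *
 *      DECLARE_CRYPTO_WAIT(wait);
 *
 *      skcipher_request_set_callback(req,
 *                                    CRYPTO_TFM_REQ_MAY_BACKLOG |
 *                                    CRYPTO_TFM_REQ_MAY_SLEEP,
 *                                    crypto_req_done, &wait);
 *      err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 * crypto_wait_req() converts -EINPROGRESS/-EBUSY into a sleep on
 * wait.completion, which crypto_req_done() completes with the final
 * status.
 */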