// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#ifndef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (WARN_ON_ONCE(err != NOTIFY_STOP))
		goto out;

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
out:
	crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);

static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_wait_for_test(larval);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	if (!crypto_boot_test_finished())
		crypto_start_test(larval);

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
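
/*
 * Illustrative sketch only (not part of this API): a caller that wants an
 * algorithm flagged CRYPTO_ALG_INTERNAL, such as a SIMD wrapper binding to
 * its inner implementation, passes the flag in both type and mask.  The
 * driver name below is hypothetical.
 *
 *	tfm = crypto_alloc_skcipher("__xts-aes-example",
 *				    CRYPTO_ALG_INTERNAL,
 *				    CRYPTO_ALG_INTERNAL | CRYPTO_ALG_ASYNC);
 *
 * A caller that does not care whether the match is internal instead sets
 * CRYPTO_ALG_INTERNAL in type and leaves it clear in mask, as the comment
 * in crypto_alg_mod_lookup() above describes.
 */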

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfmgfp(struct crypto_alg *alg, u32 type,
					 u32 mask, gfp_t gfp)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, gfp);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;
	refcount_set(&tfm->refcnt, 1);

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfmgfp);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	return __crypto_alloc_tfmgfp(alg, type, mask, GFP_KERNEL);
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
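
/*
 * Minimal usage sketch for crypto_alloc_base(), for legacy callers that
 * still need a bare struct crypto_tfm; the algorithm name is illustrative
 * only:
 *
 *	struct crypto_tfm *tfm = crypto_alloc_base("sha1", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */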

static void *crypto_alloc_tfmmem(struct crypto_alg *alg,
				 const struct crypto_type *frontend, int node,
				 gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfmsize;
	unsigned int total;
	char *mem;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, gfp, node);
	if (mem == NULL)
		return ERR_PTR(-ENOMEM);

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;
	refcount_set(&tfm->refcnt, 1);

	return mem;
}

void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	struct crypto_tfm *tfm;
	char *mem;
	int err;

	mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL);
	if (IS_ERR(mem))
		goto out;

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

void *crypto_clone_tfm(const struct crypto_type *frontend,
		       struct crypto_tfm *otfm)
{
	struct crypto_alg *alg = otfm->__crt_alg;
	struct crypto_tfm *tfm;
	char *mem;

	mem = ERR_PTR(-ESTALE);
	if (unlikely(!crypto_mod_get(alg)))
		goto out;

	mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC);
	if (IS_ERR(mem)) {
		crypto_mod_put(alg);
		goto out;
	}

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
	tfm->crt_flags = otfm->crt_flags;
	tfm->exit = otfm->exit;

out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_clone_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *	@node: NUMA node in which users desire to put requests, if node is
 *		NUMA_NO_NODE, it means users have no special requirement.
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */

void *crypto_alloc_tfm_node(const char *alg_name,
			    const struct crypto_type *frontend, u32 type, u32 mask,
			    int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
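
/*
 * The type-specific allocators are thin wrappers around
 * crypto_alloc_tfm_node().  A sketch of the usual pattern, mirroring what
 * crypto_alloc_skcipher() does (shown only for illustration):
 *
 *	struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
 *						      u32 type, u32 mask)
 *	{
 *		return crypto_alloc_tfm(alg_name, &crypto_skcipher_type,
 *					type, mask);
 *	}
 *
 * where crypto_alloc_tfm() is the NUMA_NO_NODE form of
 * crypto_alloc_tfm_node().
 */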

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	if (!refcount_dec_and_test(&tfm->refcnt))
		return;
	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
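
/*
 * crypto_has_alg() only probes for availability; the reference taken by
 * the lookup is dropped immediately.  A hedged example of a driver
 * checking for a synchronous software implementation (the algorithm name
 * is illustrative):
 *
 *	if (!crypto_has_alg("xts(aes)", 0, CRYPTO_ALG_ASYNC))
 *		return -ENODEV;
 */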

void crypto_req_done(void *data, int err)
{
	struct crypto_wait *wait = data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
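
/*
 * crypto_req_done() pairs with crypto_wait_req() to run an asynchronous
 * request synchronously.  A minimal sketch, with request setup elided:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *					   CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */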

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");