// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

DEFINE_STATIC_KEY_FALSE(crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(crypto_boot_test_finished);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

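/*
 * Take a reference on @alg and pin the module that provides it.
 * Returns @alg on success, or NULL if the owning module is being
 * unloaded.  crypto_mod_put() undoes both; it reads the module
 * pointer first because dropping the last algorithm reference may
 * free @alg.
 */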
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

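/*
 * Find the best match for @name on the algorithm list.  An exact
 * cra_driver_name match wins immediately; otherwise the highest
 * priority cra_name match is returned.  Must be called with
 * crypto_alg_sem held; a reference is taken on the returned
 * algorithm.
 */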
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

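/*
 * Larvals are temporary place-holder algorithms: they stand in for an
 * algorithm that is still being loaded or tested, and carry a
 * completion that lookups can wait on.  Once the real ("adult")
 * algorithm becomes available it is recorded in larval->adult.
 */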
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

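/*
 * Register a larval for @name unless somebody beat us to it, in which
 * case the existing entry is used instead (and waited on if it is
 * itself a larval).  The refcount of 2 covers the list and the
 * caller.
 */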
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

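/*
 * Take the larval off the algorithm list, wake all waiters and drop
 * the list's reference.
 */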
void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

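/*
 * Ask the crypto manager to test larval->adult and wait for the
 * verdict.  The larval is killed once testing completes, or if the
 * test could not be started at all.
 */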
void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (WARN_ON_ONCE(err != NOTIFY_STOP))
		goto out;

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);

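/*
 * Start testing for a boot-time test larval exactly once; the flag is
 * re-checked under crypto_alg_sem so that concurrent waiters cannot
 * start the test twice.
 */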
static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_wait_for_test(larval);
}

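/*
 * Wait (killably, with a 60 second timeout) for a larval to mature
 * into a real algorithm, then return that algorithm with a fresh
 * reference, or an ERR_PTR.  -EAGAIN tells the caller to retry the
 * lookup: the algorithm exists but is not usable yet.
 */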
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	if (!static_branch_likely(&crypto_boot_test_finished))
		crypto_start_test(larval);

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

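/*
 * Look up @name, by default restricting the search to algorithms that
 * have passed their self-tests and are not FIPS-internal.  If only an
 * untested instance exists, report -ELIBBAD so the caller knows the
 * algorithm failed its tests rather than being absent.
 */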
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

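/*
 * Look up @name and, if it is not there, try to load a module that
 * provides it ("crypto-<name>", plus "crypto-<name>-all" unless the
 * caller excluded implementations that need a fallback).  If the
 * algorithm still cannot be found, register a larval so the crypto
 * manager can be asked to construct it.
 */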
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

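/*
 * Deliver @val on the crypto notifier chain.  If no listener handles
 * it, load the "cryptomgr" module and try once more.
 */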
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

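/*
 * Main lookup entry point: find @name, loading modules and asking the
 * crypto manager to instantiate templates as needed.  On success the
 * returned algorithm carries a reference for the caller.
 */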
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

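/*
 * Size of the per-transform context, including the padding needed to
 * honour the algorithm's alignment mask.  New-style algorithm types
 * provide their own ctxsize() via cra_type; only the legacy cipher
 * and compression types are handled here directly.
 */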
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

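/*
 * Mark @alg as dying.  Called when transform initialisation fails
 * with -EAGAIN, so that the defective instance is skipped by future
 * lookups and a retry can construct a fresh one.
 */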
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
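
/*
 * Example (an illustrative sketch with a hypothetical caller; the
 * algorithm name and flags are placeholders): allocate a transform,
 * check the result with IS_ERR(), and free it when done:
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("sha1", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_destroy_tfm(tfm, tfm);
 */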

void *crypto_create_tfm_node(struct crypto_alg *alg,
			const struct crypto_type *frontend,
			int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

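/*
 * Look up an algorithm on behalf of a particular frontend type,
 * folding the frontend's fixed type bits and mask into the caller's
 * before delegating to crypto_alg_mod_lookup().
 */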
struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *	@node: NUMA node on which to allocate the transform; if @node is
 *		NUMA_NO_NODE, the caller has no special placement requirement.
 *
 *	crypto_alloc_tfm_node() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask,
		       int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

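/*
 * Return 1 if an algorithm matching @name, @type and @mask exists or
 * can be loaded/instantiated, 0 otherwise.
 */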
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

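/*
 * Completion callback for requests driven through crypto_wait_req().
 * -EINPROGRESS only reports that a backlogged request has entered the
 * hardware queue; the callback will run again with the final status.
 */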
void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
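
/*
 * Example (an illustrative sketch): crypto_req_done() is the
 * completion callback used by the crypto_wait_* helpers to drive an
 * asynchronous request synchronously, e.g. for an ahash request:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 */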

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");