// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

DEFINE_STATIC_KEY_FALSE(crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(crypto_boot_test_finished);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

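/*
 * Take a reference on both the algorithm and the module providing it.
 * Returns NULL if the module is on its way out.
 */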
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

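/*
 * Look up an algorithm on crypto_alg_list.  An exact match on
 * cra_driver_name wins immediately; otherwise the highest-priority
 * entry whose cra_name matches is returned.  The caller must hold
 * crypto_alg_sem.
 */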
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

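/*
 * A larval is a temporary stand-in that occupies an algorithm's place
 * on crypto_alg_list while the real ("adult") algorithm is being
 * loaded or tested.  Waiters block on ->completion until the larval
 * either matures or is killed.
 */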
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (WARN_ON_ONCE(err != NOTIFY_STOP))
		goto out;

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);

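/*
 * Kick off the deferred self-test for a test larval.  The
 * test_started flag is rechecked under crypto_alg_sem so that only
 * one caller actually runs the test.
 */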
static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_wait_for_test(larval);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	if (!static_branch_likely(&crypto_boot_test_finished))
		crypto_start_test(larval);

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

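/*
 * Look for a fully tested algorithm first.  If that fails, retry
 * without insisting on CRYPTO_ALG_TESTED: finding a finished
 * (non-larval) algorithm in that pass means its self-tests failed,
 * which is reported as -ELIBBAD.
 */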
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

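/*
 * Look up an algorithm, trying to load a module via the
 * "crypto-<name>" alias (see MODULE_ALIAS_CRYPTO) if it is not
 * already registered.  If the lookup still fails, register a larval
 * so that the crypto manager can be asked to instantiate it.
 */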
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

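/*
 * Deliver a notification on crypto_chain.  If nobody consumes it,
 * load the crypto manager ("cryptomgr") and try once more.
 */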
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require the caller
	 * to invoke the cipher with the internal flag in order to use
	 * it.  Also, if a caller wants to allocate a cipher that may
	 * or may not be an internal cipher, use
	 * type | CRYPTO_ALG_INTERNAL and !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

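/*
 * Size of the per-transform context, including headroom so that the
 * context can be aligned to cra_alignmask on top of the default tfm
 * context alignment.
 */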
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

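/*
 * Mark an algorithm as dying after a transform allocation failed
 * with -EAGAIN, so that lookups stop offering it to new users.
 */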
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
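
/*
 * Illustrative sketch (not part of this file): a legacy-style
 * allocation and release of an untyped transform.  Errors come back
 * as error pointers:
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */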

void *crypto_create_tfm_node(struct crypto_alg *alg,
			const struct crypto_type *frontend,
			int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *	@node: NUMA node on which to allocate the transform; NUMA_NO_NODE
 *		means the caller has no placement preference.
 *
 *	crypto_alloc_tfm_node() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask,
		       int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
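
/*
 * Typed front ends are thin wrappers around this function.  For
 * instance, crypto_alloc_skcipher() boils down to:
 *
 *	crypto_alloc_tfm(alg_name, &crypto_skcipher_type, type, mask);
 *
 * where crypto_alloc_tfm() simply passes NUMA_NO_NODE as @node.
 */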

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
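
/*
 * Illustrative sketch (not part of this file): probing for an
 * algorithm before committing to a code path that needs it:
 *
 *	if (!crypto_has_alg("gcm(aes)", 0, 0))
 *		return -ENOENT;
 */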

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
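
/*
 * Illustrative sketch (not part of this file): the usual pattern for
 * waiting synchronously on an async request, using the crypto_wait
 * helpers from <linux/crypto.h>:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */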

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
MODULE_SOFTDEP("pre: cryptomgr");