/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

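/*
 * Reference counting: crypto_alg_get() takes a reference on the
 * algorithm object itself, while crypto_mod_get() additionally pins the
 * module that provides the algorithm so it cannot be unloaded while the
 * reference is held.  crypto_mod_put() drops both references.
 */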
static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
	atomic_inc(&alg->cra_refcnt);
	return alg;
}

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	crypto_alg_put(alg);
	module_put(alg->cra_module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

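/*
 * Find an algorithm on crypto_alg_list; the caller must hold
 * crypto_alg_sem.  An exact match on cra_driver_name wins outright,
 * otherwise the highest-priority entry whose cra_name matches is
 * chosen.  Larvals only match if they were created with the same type
 * mask.  The winner is returned with its refcount raised via
 * crypto_mod_get().
 */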
struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
EXPORT_SYMBOL_GPL(__crypto_alg_lookup);

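/*
 * A larval is a temporary algorithm that stands in on crypto_alg_list
 * while the real ("adult") algorithm is being loaded or constructed.
 * Concurrent lookups for the same name find the larval and sleep on its
 * completion instead of kicking off duplicate module loads.
 */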
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	atomic_set(&larval->alg.cra_refcnt, 2);
	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg)
		kfree(larval);

	return alg;
}

static void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete(&larval->completion);
	crypto_alg_put(alg);
}

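/*
 * Wait up to 60 seconds for a larval to mature.  If an adult algorithm
 * has arrived, return it with a fresh reference; otherwise return
 * -ENOENT, or -EAGAIN if the adult is already on its way out.
 */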
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	wait_for_completion_interruptible_timeout(&larval->completion, 60 * HZ);
	alg = larval->adult;
	if (alg) {
		if (!crypto_mod_get(alg))
			alg = ERR_PTR(-EAGAIN);
	} else
		alg = ERR_PTR(-ENOENT);
	crypto_mod_put(&larval->alg);

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}

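/*
 * The full lookup cascade: check the registered algorithms, let kmod
 * try to load a module named after the algorithm, then post a request
 * to the crypto manager (loading "cryptomgr" on demand) so that it can
 * instantiate the algorithm from a template.  If a notifier promises to
 * deliver (NOTIFY_STOP), wait on the larval for the result.
 */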
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = try_then_request_module(crypto_alg_lookup(name, type, mask),
				      name);
	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
	}

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

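/*
 * Initialize the type-specific operations of a new transform.
 * New-style algorithms provide a crypto_type object with its own init
 * hook; old-style ones are dispatched on CRYPTO_ALG_TYPE_*.
 */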
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_DIGEST:
		return crypto_init_digest_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (type->exit)
			type->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		crypto_exit_digest_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

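/*
 * Compute the size of the context area that follows struct crypto_tfm,
 * including worst-case padding so the context can be aligned to the
 * algorithm's cra_alignmask on top of the default context alignment.
 */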
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_DIGEST:
		len += crypto_digest_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

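/*
 * Mark an algorithm as dying so that subsequent lookups skip it.  This
 * is used when cra_init fails with -EAGAIN and the caller is expected
 * to retry with a freshly constructed instance.
 */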
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

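/*
 * Allocate a transform for an algorithm on which the caller already
 * holds a reference.  On failure that reference is not dropped; the
 * caller is responsible for it (see crypto_alloc_base()).
 */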
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (alg->cra_init && (err = alg->cra_init(tfm))) {
		if (err == -EAGAIN)
			crypto_shoot_alg(alg);
		goto cra_init_failed;
	}

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

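/*
 * Example (illustrative sketch only; "md5" is just a stand-in
 * algorithm name):
 *
 *	struct crypto_tfm *tfm = crypto_alloc_base("md5", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 *
 * Most callers should instead use the type-specific wrappers such as
 * crypto_alloc_blkcipher(), which return a typed handle.
 */
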
/*
 *	crypto_free_tfm - Free crypto transform
 *	@tfm: Transform to free
 *
 *	crypto_free_tfm() frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_free_tfm(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;
	int size;

	if (unlikely(!tfm))
		return;

	alg = tfm->__crt_alg;
	size = sizeof(*tfm) + alg->cra_ctxsize;

	if (alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	memset(tfm, 0, size);
	kfree(tfm);
}
EXPORT_SYMBOL_GPL(crypto_free_tfm);

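/*
 * Probe whether an algorithm is available without keeping a reference:
 * the looked-up algorithm is dropped immediately and only success or
 * failure is reported.
 */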
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);