/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

/*
 * The key may be misaligned for the algorithm's alignmask; bounce it
 * through an aligned heap buffer before handing it to ->setkey().
 */
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (err)
		return err;

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

/*
 * Hash the leading misaligned chunk through an aligned stack buffer, then
 * feed the (now aligned) remainder of the data to ->update() directly.
 */
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
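
/*
 * Illustrative sketch, not part of the original file: how a caller might
 * drive the incremental init/update/final interface above.  The algorithm
 * name "sha256" and the helper name are assumptions made for this example.
 */
static int __maybe_unused example_shash_incremental(const u8 *p1, unsigned int l1,
						    const u8 *p2, unsigned int l2,
						    u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		desc->flags = 0;

		/* Hash two discontiguous buffers into a single digest. */
		err = crypto_shash_init(desc) ?:
		      crypto_shash_update(desc, p1, l1) ?:
		      crypto_shash_update(desc, p2, l2) ?:
		      crypto_shash_final(desc, out);

		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}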

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
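
/*
 * Illustrative sketch, not part of the original file: a one-shot keyed
 * digest combining crypto_shash_setkey() and crypto_shash_digest().  The
 * "hmac(sha256)" algorithm name and the helper name are assumptions made
 * for this example only.
 */
static int __maybe_unused example_shash_oneshot(const u8 *key, unsigned int keylen,
						const u8 *data, unsigned int len,
						u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* A successful setkey clears CRYPTO_TFM_NEED_KEY, allowing digest. */
	err = crypto_shash_setkey(tfm, key, keylen);
	if (!err) {
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		desc->flags = 0;
		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}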

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	/*
	 * Fast path: if the data lies within a single page, map it and use
	 * the one-shot digest; otherwise walk the scatterlist.
	 */
	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

/*
 * Wire up the ahash entry points so that a shash algorithm can also be
 * driven through the asynchronous hash API.
 */
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	hash->descsize = alg->descsize;

	if (crypto_shash_alg_has_setkey(alg) &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_shash_set_flags(hash, CRYPTO_TFM_NEED_KEY);

	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
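
/*
 * Illustrative sketch, not part of the original file: the minimal shape a
 * driver gives to a shash algorithm before calling crypto_register_shash().
 * shash_prepare_alg() above fills in finup/digest/export/import/setkey
 * defaults, so only init/update/final are mandatory.  Every name, size and
 * stub body below is an assumption made for this example only.
 */
struct example_hash_state {
	u8 buf[32];		/* hypothetical running state */
};

static int example_hash_init(struct shash_desc *desc)
{
	memset(shash_desc_ctx(desc), 0, sizeof(struct example_hash_state));
	return 0;
}

static int example_hash_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	/* A real driver would fold @data into the per-request state here. */
	return 0;
}

static int example_hash_final(struct shash_desc *desc, u8 *out)
{
	memcpy(out, shash_desc_ctx(desc), 32);
	return 0;
}

static struct shash_alg example_hash_alg __maybe_unused = {
	.digestsize	= 32,
	.descsize	= sizeof(struct example_hash_state),
	.init		= example_hash_init,
	.update		= example_hash_update,
	.final		= example_hash_final,
	.base		= {
		.cra_name	 = "example-hash",
		.cra_driver_name = "example-hash-generic",
		.cra_priority	 = 100,
		.cra_blocksize	 = 64,
		.cra_module	 = THIS_MODULE,
	},
};
/* A driver's module_init() would call crypto_register_shash(&example_hash_alg). */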

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");