// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
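
/*
 * Example usage (illustrative sketch; the algorithm name and key contents
 * are placeholders): a keyed-hash caller allocates the transform and keys
 * it before hashing any data.
 *
 *	struct crypto_shash *tfm;
 *	static const u8 key[32] = { placeholder };
 *	int err;
 *
 *	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_setkey(tfm, key, sizeof(key));
 *	if (err)
 *		goto out_free;
 *	(hash data here)
 * out_free:
 *	crypto_free_shash(tfm);
 */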

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
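
/*
 * Example usage (illustrative sketch; "sha256" is a placeholder and error
 * handling is abbreviated): the classic incremental flow over the entry
 * points above, using an on-stack descriptor.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	desc->tfm = tfm;
 *	crypto_shash_init(desc);
 *	crypto_shash_update(desc, buf1, len1);
 *	crypto_shash_update(desc, buf2, len2);
 *	crypto_shash_final(desc, digest);
 *	shash_desc_zero(desc);
 *	crypto_free_shash(tfm);
 */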

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
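
/*
 * Example usage (illustrative sketch): crypto_shash_tfm_digest() is the
 * one-shot convenience wrapper around the init/update/final sequence, so
 * a caller with data in a single linear buffer only needs:
 *
 *	err = crypto_shash_tfm_digest(tfm, data, len, digest);
 *
 * with tfm, data, len and digest as in the sketch further above.
 */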

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}
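
/*
 * The defaults above treat the descriptor context itself as the exported
 * state: export is a plain copy out and import a plain copy back in.
 * This is also why shash_prepare_alg() sets ->statesize to ->descsize
 * when an algorithm does not supply its own export/import pair.
 */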

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

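/*
 * Fast path note: when the request data is non-empty and fits entirely
 * within one page of the first scatterlist entry, shash_ahash_digest()
 * maps that page and hashes it linearly in a single call; otherwise it
 * falls back to the generic init + walk-based finup path.
 */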
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

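/*
 * crypto_init_shash_ops_async() lets a synchronous shash algorithm be
 * driven through the asynchronous ahash API: the ahash tfm context holds
 * a pointer to an underlying shash tfm, and each ahash entry point is
 * wired to the shash_async_* wrapper that runs the corresponding shash
 * operation over the request's scatterlist.
 */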
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

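/*
 * shash_prepare_alg() validates the algorithm's size limits and fills in
 * defaults for any optional callbacks the implementation left NULL:
 * finup and digest are composed from update/final, export/import fall
 * back to copying the raw descriptor context, and setkey is stubbed out
 * with shash_no_setkey().
 */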
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
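
/*
 * Example (illustrative sketch): a minimal driver-side registration.
 * The my_* names, sizes and handlers below are placeholders for a real
 * implementation; production algorithms typically also set
 * .cra_driver_name and .cra_priority.
 *
 *	static struct shash_alg my_alg = {
 *		.digestsize	= MY_DIGEST_SIZE,
 *		.descsize	= sizeof(struct my_desc_ctx),
 *		.init		= my_init,
 *		.update		= my_update,
 *		.final		= my_final,
 *		.base		= {
 *			.cra_name	= "my-hash",
 *			.cra_blocksize	= MY_BLOCK_SIZE,
 *			.cra_module	= THIS_MODULE,
 *		},
 *	};
 *
 *	static int __init my_mod_init(void)
 *	{
 *		return crypto_register_shash(&my_alg);
 *	}
 */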

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");