xref: /openbmc/linux/crypto/shash.c (revision b830f94f)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

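/* Default ->setkey() for algorithms that do not take a key: always fails. */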
int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

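/*
 * The algorithm's ->setkey() may rely on the key being aligned to the
 * alignmask.  Copy an unaligned key into a temporary aligned buffer
 * before handing it down, and wipe the copy afterwards.
 */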
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_has_setkey(alg) &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

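/*
 * Set the key for a keyed hash.  Unaligned keys are bounced through
 * shash_setkey_unaligned().  On failure the NEED_KEY flag is re-armed so
 * the tfm cannot be used until a valid key has been set.
 */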
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

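/*
 * Handle an update whose data pointer violates the alignmask: feed the
 * leading unaligned bytes through an aligned on-stack bounce buffer, then
 * pass the now-aligned remainder straight to ->update().
 */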
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

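/*
 * Handle a final call with an unaligned output pointer: let the algorithm
 * write the digest into an aligned on-stack buffer, then copy it out and
 * wipe the temporary copy.
 */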
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

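/*
 * One-shot digest: init + update + final in a single call.  A keyed hash
 * that has not been given a key yet is rejected with -ENOKEY.
 *
 * A minimal caller sketch (illustrative only, not part of this file;
 * "sha256" is just an example algorithm name, and data/len/out are assumed
 * to be provided by the caller):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		err = crypto_shash_digest(desc, data, len, out);
 *		shash_desc_zero(desc);
 *	}
 *	crypto_free_shash(tfm);
 *	return err;
 */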
int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

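/*
 * The shash_async_* helpers below expose a synchronous shash algorithm
 * through the asynchronous ahash interface.  The ahash tfm context is
 * simply a pointer to the underlying crypto_shash, and each request uses
 * its request context as the shash_desc.
 */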
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

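/*
 * Walk the request's scatterlist and feed each mapped chunk to
 * crypto_shash_update().
 */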
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

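/*
 * Like shash_ahash_update(), but finish the hash in the same pass by
 * using crypto_shash_finup() on the last chunk of the walk.
 */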
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

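/*
 * One-shot digest over an ahash request.  If the data is contiguous
 * within a single page, map it and digest it in one call; otherwise fall
 * back to init + finup over the scatterlist walk.
 */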
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

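/*
 * Instantiate a shash algorithm behind an ahash tfm: allocate the
 * underlying crypto_shash, store it in the ahash tfm context and wire the
 * asynchronous entry points up to the shash_async_* wrappers above.
 */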
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

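/*
 * Per-tfm initialisation for the shash type: record the descriptor size
 * and arm the NEED_KEY flag for algorithms that require a key.
 */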
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

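/*
 * Sanity-check an shash algorithm and fill in default operations
 * (finup, digest, export/import and setkey) before registration.
 */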
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

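/*
 * Register an array of shash algorithms, unwinding the ones already
 * registered if any registration fails.
 */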
int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");