/* SPDX-License-Identifier: GPL-2.0 OR MIT */
/*
 * Helper functions for BLAKE2s implementations.
 * Keep this in sync with the corresponding BLAKE2b header.
 */

#ifndef _CRYPTO_INTERNAL_BLAKE2S_H
#define _CRYPTO_INTERNAL_BLAKE2S_H

#include <crypto/blake2s.h>
#include <crypto/internal/hash.h>
#include <linux/string.h>

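/*
 * blake2s_compress_generic() is the portable C implementation;
 * blake2s_compress() may be overridden by an architecture-specific
 * implementation.  Both compress 'nblocks' contiguous blocks from 'block',
 * adding 'inc' bytes to the message counter for each block processed.
 */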
void blake2s_compress_generic(struct blake2s_state *state, const u8 *block,
			      size_t nblocks, const u32 inc);

void blake2s_compress(struct blake2s_state *state, const u8 *block,
		      size_t nblocks, const u32 inc);

bool blake2s_selftest(void);

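/* Set the last-block flag: f[0] is all ones for the final compression. */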
static inline void blake2s_set_lastblock(struct blake2s_state *state)
{
	state->f[0] = -1;
}

/* Helper functions for BLAKE2s shared by the library and shash APIs */

static __always_inline void
__blake2s_update(struct blake2s_state *state, const u8 *in, size_t inlen,
		 bool force_generic)
{
	const size_t fill = BLAKE2S_BLOCK_SIZE - state->buflen;

	if (unlikely(!inlen))
		return;
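	/* The new data overflows the internal buffer: fill it and compress. */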
	if (inlen > fill) {
		memcpy(state->buf + state->buflen, in, fill);
		if (force_generic)
			blake2s_compress_generic(state, state->buf, 1,
						 BLAKE2S_BLOCK_SIZE);
		else
			blake2s_compress(state, state->buf, 1,
					 BLAKE2S_BLOCK_SIZE);
		state->buflen = 0;
		in += fill;
		inlen -= fill;
	}
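	/*
	 * More than one block remains: compress as much as possible directly
	 * from the input, but leave the last (possibly partial) block to be
	 * buffered below, since __blake2s_final() must compress the final
	 * block itself with the last-block flag set.
	 */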
	if (inlen > BLAKE2S_BLOCK_SIZE) {
		const size_t nblocks = DIV_ROUND_UP(inlen, BLAKE2S_BLOCK_SIZE);
		/* Hash one less (full) block than strictly possible */
		if (force_generic)
			blake2s_compress_generic(state, in, nblocks - 1,
						 BLAKE2S_BLOCK_SIZE);
		else
			blake2s_compress(state, in, nblocks - 1,
					 BLAKE2S_BLOCK_SIZE);
		in += BLAKE2S_BLOCK_SIZE * (nblocks - 1);
		inlen -= BLAKE2S_BLOCK_SIZE * (nblocks - 1);
	}
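	/* Buffer the remaining input; at most one block's worth is left. */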
	memcpy(state->buf + state->buflen, in, inlen);
	state->buflen += inlen;
}

static __always_inline void
__blake2s_final(struct blake2s_state *state, u8 *out, bool force_generic)
{
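	/*
	 * The last block is still buffered.  Zero-pad it and compress it with
	 * the last-block flag set, counting only the bytes actually buffered.
	 */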
	blake2s_set_lastblock(state);
	memset(state->buf + state->buflen, 0,
	       BLAKE2S_BLOCK_SIZE - state->buflen); /* Padding */
	if (force_generic)
		blake2s_compress_generic(state, state->buf, 1, state->buflen);
	else
		blake2s_compress(state, state->buf, 1, state->buflen);
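	/* Emit the first 'outlen' bytes of the state words, little-endian. */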
	cpu_to_le32_array(state->h, ARRAY_SIZE(state->h));
	memcpy(out, state->h, state->outlen);
}

/* Helper functions for shash implementations of BLAKE2s */

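/*
 * The key lives in the tfm context so that crypto_blake2s_init() can re-key
 * the per-request hash state for every new digest.
 */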
struct blake2s_tfm_ctx {
	u8 key[BLAKE2S_KEY_SIZE];
	unsigned int keylen;
};

static inline int crypto_blake2s_setkey(struct crypto_shash *tfm,
					const u8 *key, unsigned int keylen)
{
	struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(tfm);

	if (keylen == 0 || keylen > BLAKE2S_KEY_SIZE)
		return -EINVAL;

	memcpy(tctx->key, key, keylen);
	tctx->keylen = keylen;

	return 0;
}

static inline int crypto_blake2s_init(struct shash_desc *desc)
{
	const struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct blake2s_state *state = shash_desc_ctx(desc);
	unsigned int outlen = crypto_shash_digestsize(desc->tfm);

	__blake2s_init(state, outlen, tctx->key, tctx->keylen);
	return 0;
}

static inline int crypto_blake2s_update(struct shash_desc *desc,
					const u8 *in, unsigned int inlen,
					bool force_generic)
{
	struct blake2s_state *state = shash_desc_ctx(desc);

	__blake2s_update(state, in, inlen, force_generic);
	return 0;
}

static inline int crypto_blake2s_final(struct shash_desc *desc, u8 *out,
				       bool force_generic)
{
	struct blake2s_state *state = shash_desc_ctx(desc);

	__blake2s_final(state, out, force_generic);
	return 0;
}
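
/*
 * Illustrative sketch (not part of this header): a shash driver is expected
 * to call these helpers from its own callbacks, roughly along the lines of
 * the hypothetical wrappers below, with force_generic=false so that an
 * architecture-optimized blake2s_compress() is used when available:
 *
 *	static int example_blake2s_update(struct shash_desc *desc,
 *					  const u8 *in, unsigned int inlen)
 *	{
 *		return crypto_blake2s_update(desc, in, inlen, false);
 *	}
 *
 *	static int example_blake2s_final(struct shash_desc *desc, u8 *out)
 *	{
 *		return crypto_blake2s_final(desc, out, false);
 *	}
 *
 * crypto_blake2s_setkey() and crypto_blake2s_init() already match the
 * .setkey and .init prototypes of struct shash_alg and can be used directly.
 */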

#endif /* _CRYPTO_INTERNAL_BLAKE2S_H */