// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Glue code for the SHA1 Secure Hash Algorithm assembler implementation using
 * Supplemental SSE3 instructions.
 *
 * This file is based on sha1_generic.c
 *
 * Copyright (c) Alan Smithee.
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) Mathias Krause <minipli@googlemail.com>
 * Copyright (c) Chandramouli Narayanan <mouli@linux.intel.com>
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha1_base.h>
#include <asm/simd.h>

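/*
 * Common update path shared by all variants.  When the FPU is not usable in
 * the current context, or the buffered data plus the new data still do not
 * fill a complete block, fall back to the generic C implementation.
 */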
static int sha1_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len, sha1_block_fn *sha1_xform)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
		return crypto_sha1_update(desc, data, len);

	/*
	 * Make sure struct sha1_state begins directly with the SHA1
	 * 160-bit internal state, as this is what the asm functions expect.
	 */
	BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);

	kernel_fpu_begin();
	sha1_base_do_update(desc, data, len, sha1_xform);
	kernel_fpu_end();

	return 0;
}

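/*
 * Common finup path: process any remaining data and emit the final digest
 * within a single kernel_fpu_begin()/kernel_fpu_end() section.
 */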
static int sha1_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out, sha1_block_fn *sha1_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha1_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha1_base_do_update(desc, data, len, sha1_xform);
	sha1_base_do_finalize(desc, sha1_xform);
	kernel_fpu_end();

	return sha1_base_finish(desc, out);
}

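/* SHA-1 block transform implemented in SSSE3 assembly. */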
asmlinkage void sha1_transform_ssse3(struct sha1_state *state,
				     const u8 *data, int blocks);

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_ssse3);
}

static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ssse3_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ssse3_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_ssse3_update,
	.final		=	sha1_ssse3_final,
	.finup		=	sha1_ssse3_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

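/*
 * Each variant is registered only when the CPU supports the required
 * features; otherwise registration is a silent no-op.
 */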
static int register_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shash(&sha1_ssse3_alg);
	return 0;
}

static void unregister_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shash(&sha1_ssse3_alg);
}

#ifdef CONFIG_AS_AVX
asmlinkage void sha1_transform_avx(struct sha1_state *state,
				   const u8 *data, int blocks);

static int sha1_avx_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_avx);
}

static int sha1_avx_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_avx);
}

static int sha1_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_avx_update,
	.final		=	sha1_avx_final,
	.finup		=	sha1_avx_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

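/*
 * AVX is usable only when the kernel has enabled XSAVE support for both the
 * SSE and YMM register state; a CPU may advertise AVX even though the OS
 * cannot context-switch the extended state.
 */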
static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha1_avx(void)
{
	if (avx_usable())
		return crypto_register_shash(&sha1_avx_alg);
	return 0;
}

static void unregister_sha1_avx(void)
{
	if (avx_usable())
		crypto_unregister_shash(&sha1_avx_alg);
}

#else  /* CONFIG_AS_AVX */
static inline int register_sha1_avx(void) { return 0; }
static inline void unregister_sha1_avx(void) { }
#endif /* CONFIG_AS_AVX */


#if defined(CONFIG_AS_AVX2) && defined(CONFIG_AS_AVX)
#define SHA1_AVX2_BLOCK_OPTSIZE	4	/* optimal 4*64 bytes of SHA1 blocks */

asmlinkage void sha1_transform_avx2(struct sha1_state *state,
				    const u8 *data, int blocks);

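/*
 * The AVX2 implementation also depends on the BMI1 and BMI2 instruction
 * sets, so those are required in addition to the base AVX checks.
 */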
static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2)
		&& boot_cpu_has(X86_FEATURE_BMI1)
		&& boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static void sha1_apply_transform_avx2(struct sha1_state *state,
				      const u8 *data, int blocks)
{
	/* Select the optimal transform based on data block size */
	if (blocks >= SHA1_AVX2_BLOCK_OPTSIZE)
		sha1_transform_avx2(state, data, blocks);
	else
		sha1_transform_avx(state, data, blocks);
}

static int sha1_avx2_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_apply_transform_avx2);
}

static int sha1_avx2_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_apply_transform_avx2);
}

static int sha1_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx2_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_avx2_update,
	.final		=	sha1_avx2_final,
	.finup		=	sha1_avx2_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shash(&sha1_avx2_alg);
	return 0;
}

static void unregister_sha1_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shash(&sha1_avx2_alg);
}

#else
static inline int register_sha1_avx2(void) { return 0; }
static inline void unregister_sha1_avx2(void) { }
#endif

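/*
 * Intel SHA extensions (SHA-NI) implementation, registered with the highest
 * priority (250) of the variants in this file.
 */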
#ifdef CONFIG_AS_SHA1_NI
asmlinkage void sha1_ni_transform(struct sha1_state *digest, const u8 *data,
				  int rounds);

static int sha1_ni_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_ni_transform);
}

static int sha1_ni_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_ni_transform);
}

static int sha1_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ni_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_ni_update,
	.final		=	sha1_ni_final,
	.finup		=	sha1_ni_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shash(&sha1_ni_alg);
	return 0;
}

static void unregister_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shash(&sha1_ni_alg);
}

#else
static inline int register_sha1_ni(void) { return 0; }
static inline void unregister_sha1_ni(void) { }
#endif

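/*
 * Register every variant the CPU supports; the crypto core then selects the
 * highest-priority implementation when "sha1" is requested.
 */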
static int __init sha1_ssse3_mod_init(void)
{
	if (register_sha1_ssse3())
		goto fail;

	if (register_sha1_avx()) {
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_avx2()) {
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_ni()) {
		unregister_sha1_avx2();
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
	unregister_sha1_ni();
	unregister_sha1_avx2();
	unregister_sha1_avx();
	unregister_sha1_ssse3();
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha1");
MODULE_ALIAS_CRYPTO("sha1-ssse3");
MODULE_ALIAS_CRYPTO("sha1-avx");
MODULE_ALIAS_CRYPTO("sha1-avx2");
#ifdef CONFIG_AS_SHA1_NI
MODULE_ALIAS_CRYPTO("sha1-ni");
#endif