xref: /openbmc/linux/arch/x86/crypto/sha1_ssse3_glue.c (revision a89aa749ece9c6fee7932163472d2ee0efd6ddd3)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Glue code for the SHA1 Secure Hash Algorithm assembler implementation using
 * Supplemental SSE3 instructions.
 *
 * This file is based on sha1_generic.c
 *
 * Copyright (c) Alan Smithee.
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) Mathias Krause <minipli@googlemail.com>
 * Copyright (c) Chandramouli Narayanan <mouli@linux.intel.com>
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha1_base.h>
#include <asm/simd.h>

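/*
 * Common update path shared by all variants below: the actual block
 * transform is passed in as a sha1_block_fn, so each instruction-set
 * flavour only has to supply its assembler routine.  When SIMD cannot be
 * used in the current context, or when the new data does not even
 * complete one 64-byte block (so the kernel_fpu_begin()/end() overhead
 * would not pay off), fall back to the generic C implementation.
 */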
static int sha1_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len, sha1_block_fn *sha1_xform)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
		return crypto_sha1_update(desc, data, len);

	/*
	 * Make sure struct sha1_state begins directly with the SHA1
	 * 160-bit internal state, as this is what the asm functions expect.
	 */
	BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);

	kernel_fpu_begin();
	sha1_base_do_update(desc, data, len, sha1_xform);
	kernel_fpu_end();

	return 0;
}

static int sha1_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out, sha1_block_fn *sha1_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha1_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha1_base_do_update(desc, data, len, sha1_xform);
	sha1_base_do_finalize(desc, sha1_xform);
	kernel_fpu_end();

	return sha1_base_finish(desc, out);
}

asmlinkage void sha1_transform_ssse3(struct sha1_state *state,
				     const u8 *data, int blocks);

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_ssse3);
}

static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ssse3_finup(desc, NULL, 0, out);
}

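/*
 * All variants register under the same algorithm name ("sha1") and are
 * told apart by cra_driver_name and cra_priority: 150 for SSSE3, 160 for
 * AVX, 170 for AVX2 and 250 for SHA-NI.  The crypto API resolves e.g.
 * crypto_alloc_shash("sha1", 0, 0) to the highest-priority implementation
 * that actually got registered on this CPU.
 */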
static struct shash_alg sha1_ssse3_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_ssse3_update,
	.final		=	sha1_ssse3_final,
	.finup		=	sha1_ssse3_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shash(&sha1_ssse3_alg);
	return 0;
}

static void unregister_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shash(&sha1_ssse3_alg);
}

asmlinkage void sha1_transform_avx(struct sha1_state *state,
				   const u8 *data, int blocks);

static int sha1_avx_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_avx);
}

static int sha1_avx_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_avx);
}

static int sha1_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_avx_update,
	.final		=	sha1_avx_final,
	.finup		=	sha1_avx_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

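/*
 * The AVX CPUID bit alone is not enough: the OS must also have enabled
 * saving and restoring of the SSE and YMM register state via XSAVE, which
 * is what cpu_has_xfeatures() verifies here.
 */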
static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha1_avx(void)
{
	if (avx_usable())
		return crypto_register_shash(&sha1_avx_alg);
	return 0;
}

static void unregister_sha1_avx(void)
{
	if (avx_usable())
		crypto_unregister_shash(&sha1_avx_alg);
}

#define SHA1_AVX2_BLOCK_OPTSIZE	4	/* optimal 4*64 bytes of SHA1 blocks */

asmlinkage void sha1_transform_avx2(struct sha1_state *state,
				    const u8 *data, int blocks);

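/*
 * The AVX2 assembly additionally relies on BMI1/BMI2 instructions, so
 * usable AVX plus the AVX2, BMI1 and BMI2 feature flags are all required.
 */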
static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2)
		&& boot_cpu_has(X86_FEATURE_BMI1)
		&& boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

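/*
 * The AVX2 routine is only worthwhile once at least
 * SHA1_AVX2_BLOCK_OPTSIZE (4) blocks are available, so shorter inputs are
 * dispatched to sha1_transform_avx() instead.
 */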
static void sha1_apply_transform_avx2(struct sha1_state *state,
				      const u8 *data, int blocks)
{
	/* Select the optimal transform based on data block size */
	if (blocks >= SHA1_AVX2_BLOCK_OPTSIZE)
		sha1_transform_avx2(state, data, blocks);
	else
		sha1_transform_avx(state, data, blocks);
}

static int sha1_avx2_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_apply_transform_avx2);
}

static int sha1_avx2_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_apply_transform_avx2);
}

static int sha1_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx2_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_avx2_update,
	.final		=	sha1_avx2_final,
	.finup		=	sha1_avx2_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shash(&sha1_avx2_alg);
	return 0;
}

static void unregister_sha1_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shash(&sha1_avx2_alg);
}

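/*
 * The SHA-NI variant uses the dedicated SHA instruction set extensions and
 * is only built when the assembler supports them (CONFIG_AS_SHA1_NI);
 * otherwise the register/unregister helpers below become no-ops.  Note
 * that despite the parameter name "rounds", the third argument passed to
 * sha1_ni_transform() is the number of 64-byte blocks, just as for the
 * other transforms.
 */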
#ifdef CONFIG_AS_SHA1_NI
asmlinkage void sha1_ni_transform(struct sha1_state *digest, const u8 *data,
				  int rounds);

static int sha1_ni_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_ni_transform);
}

static int sha1_ni_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_ni_transform);
}

static int sha1_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ni_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_ni_update,
	.final		=	sha1_ni_final,
	.finup		=	sha1_ni_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shash(&sha1_ni_alg);
	return 0;
}

static void unregister_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shash(&sha1_ni_alg);
}

#else
static inline int register_sha1_ni(void) { return 0; }
static inline void unregister_sha1_ni(void) { }
#endif

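/*
 * Register every variant that the CPU (and the build) supports.  A missing
 * CPU feature is not an error; the corresponding register helper simply
 * returns 0 without registering anything.  If an actual registration call
 * fails, everything registered so far is unregistered again and module
 * init fails with -ENODEV.
 */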
static int __init sha1_ssse3_mod_init(void)
{
	if (register_sha1_ssse3())
		goto fail;

	if (register_sha1_avx()) {
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_avx2()) {
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_ni()) {
		unregister_sha1_avx2();
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
	unregister_sha1_ni();
	unregister_sha1_avx2();
	unregister_sha1_avx();
	unregister_sha1_ssse3();
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha1");
MODULE_ALIAS_CRYPTO("sha1-ssse3");
MODULE_ALIAS_CRYPTO("sha1-avx");
MODULE_ALIAS_CRYPTO("sha1-avx2");
#ifdef CONFIG_AS_SHA1_NI
MODULE_ALIAS_CRYPTO("sha1-ni");
#endif