/*
 * Cryptographic API.
 *
 * Glue code for the SHA256 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha256_generic.c
 *
 * Copyright (C) 2013 Intel Corporation.
 *
 * Author:
 *     Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */


#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/string.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>

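/* SSSE3 assembly transform: processes 'blocks' whole 64-byte SHA-256 blocks. */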
asmlinkage void sha256_transform_ssse3(struct sha256_state *state,
				       const u8 *data, int blocks);

static const struct x86_cpu_id module_cpu_ids[] = {
	X86_MATCH_FEATURE(X86_FEATURE_AVX2, NULL),
	X86_MATCH_FEATURE(X86_FEATURE_AVX, NULL),
	X86_MATCH_FEATURE(X86_FEATURE_SSSE3, NULL),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, module_cpu_ids);

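/*
 * Common update helper: fall back to the generic C implementation when the
 * FPU is not usable or the new data does not complete a block; otherwise
 * feed the assembly transform inside a kernel_fpu_begin()/end() section.
 */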
static int _sha256_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len, sha256_block_fn *sha256_xform)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	/*
	 * Make sure struct sha256_state begins directly with the SHA256
	 * 256-bit internal state, as this is what the asm functions expect.
	 */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	kernel_fpu_begin();
	sha256_base_do_update(desc, data, len, sha256_xform);
	kernel_fpu_end();

	return 0;
}

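/*
 * Common finup helper: hash any remaining data plus the final padding with
 * the assembly transform, falling back to the generic code when the FPU is
 * unavailable.
 */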
static int sha256_finup(struct shash_desc *desc, const u8 *data,
	      unsigned int len, u8 *out, sha256_block_fn *sha256_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha256_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha256_base_do_update(desc, data, len, sha256_xform);
	sha256_base_do_finalize(desc, sha256_xform);
	kernel_fpu_end();

	return sha256_base_finish(desc, out);
}

static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_ssse3);
}

static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
	      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ssse3_finup(desc, NULL, 0, out);
}

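/*
 * cra_priority ranks the variants: generic C < SSSE3 (150) < AVX (160) <
 * AVX2 (170) < SHA-NI (250), so the crypto core selects the highest-priority
 * implementation registered for "sha256" and "sha224".
 */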
static struct shash_alg sha256_ssse3_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_ssse3_update,
	.final		=	sha256_ssse3_final,
	.finup		=	sha256_ssse3_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_ssse3_update,
	.final		=	sha256_ssse3_final,
	.finup		=	sha256_ssse3_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

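/*
 * Each variant pairs register/unregister helpers gated on the same runtime
 * check, so module init and exit stay symmetric.
 */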
static int register_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
	return 0;
}

static void unregister_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
}

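/* AVX assembly transform; see avx_usable() for the required runtime checks. */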
asmlinkage void sha256_transform_avx(struct sha256_state *state,
				     const u8 *data, int blocks);

static int sha256_avx_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_avx);
}

static int sha256_avx_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_avx);
}

static int sha256_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_avx_update,
	.final		=	sha256_avx_final,
	.finup		=	sha256_avx_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_avx_update,
	.final		=	sha256_avx_final,
	.finup		=	sha256_avx_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

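/*
 * The AVX CPUID bit alone is not enough: the OS must also have enabled
 * saving of the SSE and YMM register state (XSAVE) for AVX to be usable.
 */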
static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha256_avx(void)
{
	if (avx_usable())
		return crypto_register_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
	return 0;
}

static void unregister_sha256_avx(void)
{
	if (avx_usable())
		crypto_unregister_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
}

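/*
 * AVX2 assembly transform; "rorx" refers to the BMI2 rotate instruction it
 * uses, which is why avx2_usable() also checks X86_FEATURE_BMI2.
 */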
asmlinkage void sha256_transform_rorx(struct sha256_state *state,
				      const u8 *data, int blocks);

static int sha256_avx2_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_rorx);
}

static int sha256_avx2_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_rorx);
}

static int sha256_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx2_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_avx2_update,
	.final		=	sha256_avx2_final,
	.finup		=	sha256_avx2_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_avx2_update,
	.final		=	sha256_avx2_final,
	.finup		=	sha256_avx2_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
		    boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static int register_sha256_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
	return 0;
}

static void unregister_sha256_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
}

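/*
 * The SHA-NI variant is only built when the assembler supports the SHA
 * extensions (CONFIG_AS_SHA256_NI); stubs are provided otherwise. Note that
 * despite the parameter name "rounds", the third argument is the number of
 * blocks, since the function is used as a sha256_block_fn.
 */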
#ifdef CONFIG_AS_SHA256_NI
asmlinkage void sha256_ni_transform(struct sha256_state *digest,
				    const u8 *data, int rounds);

static int sha256_ni_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_ni_transform);
}

static int sha256_ni_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_ni_transform);
}

static int sha256_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_ni_algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	sha256_ni_update,
	.final		=	sha256_ni_final,
	.finup		=	sha256_ni_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	sha256_ni_update,
	.final		=	sha256_ni_final,
	.finup		=	sha256_ni_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

static int register_sha256_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shashes(sha256_ni_algs,
				ARRAY_SIZE(sha256_ni_algs));
	return 0;
}

static void unregister_sha256_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shashes(sha256_ni_algs,
				ARRAY_SIZE(sha256_ni_algs));
}

#else
static inline int register_sha256_ni(void) { return 0; }
static inline void unregister_sha256_ni(void) { }
#endif

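/*
 * Register every variant the CPU supports; registration is unwound in
 * reverse order if a later step fails, and module exit unregisters them all.
 */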
static int __init sha256_ssse3_mod_init(void)
{
	if (!x86_match_cpu(module_cpu_ids))
		return -ENODEV;

	if (register_sha256_ssse3())
		goto fail;

	if (register_sha256_avx()) {
		unregister_sha256_ssse3();
		goto fail;
	}

	if (register_sha256_avx2()) {
		unregister_sha256_avx();
		unregister_sha256_ssse3();
		goto fail;
	}

	if (register_sha256_ni()) {
		unregister_sha256_avx2();
		unregister_sha256_avx();
		unregister_sha256_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha256_ssse3_mod_fini(void)
{
	unregister_sha256_ni();
	unregister_sha256_avx2();
	unregister_sha256_avx();
	unregister_sha256_ssse3();
}

module_init(sha256_ssse3_mod_init);
module_exit(sha256_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-ssse3");
MODULE_ALIAS_CRYPTO("sha256-avx");
MODULE_ALIAS_CRYPTO("sha256-avx2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-ssse3");
MODULE_ALIAS_CRYPTO("sha224-avx");
MODULE_ALIAS_CRYPTO("sha224-avx2");
#ifdef CONFIG_AS_SHA256_NI
MODULE_ALIAS_CRYPTO("sha256-ni");
MODULE_ALIAS_CRYPTO("sha224-ni");
#endif