/*
 * Cryptographic API.
 *
 * Glue code for the SHA256 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha256_generic.c
 *
 * Copyright (C) 2013 Intel Corporation.
 *
 * Author:
 *	Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <linux/string.h>
#include <asm/simd.h>

asmlinkage void sha256_transform_ssse3(struct sha256_state *state,
				       const u8 *data, int blocks);

static int _sha256_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len, sha256_block_fn *sha256_xform)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	/*
	 * Make sure struct sha256_state begins directly with the SHA256
	 * 256-bit internal state, as this is what the asm functions expect.
	 */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	kernel_fpu_begin();
	sha256_base_do_update(desc, data, len, sha256_xform);
	kernel_fpu_end();

	return 0;
}

static int sha256_finup(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out,
			sha256_block_fn *sha256_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha256_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha256_base_do_update(desc, data, len, sha256_xform);
	sha256_base_do_finalize(desc, sha256_xform);
	kernel_fpu_end();

	return sha256_base_finish(desc, out);
}

static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_ssse3);
}

static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_ssse3);
}
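/*
 * Each ISA variant below is a thin wrapper that passes its own block
 * function into the two helpers above.  _sha256_update() falls back to
 * the generic C implementation when the FPU is not usable in the
 * current context, or when the buffered bytes plus @len still do not
 * fill one 64-byte block: in that case no blocks would be transformed
 * anyway, so the kernel_fpu_begin()/kernel_fpu_end() state save and
 * restore would be pure overhead.
 */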
/* Add padding and return the message digest. */
static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ssse3_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_ssse3_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_ssse3_update,
	.final		= sha256_ssse3_final,
	.finup		= sha256_ssse3_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-ssse3",
		.cra_priority	 = 150,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_ssse3_update,
	.final		= sha256_ssse3_final,
	.finup		= sha256_ssse3_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-ssse3",
		.cra_priority	 = 150,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static int register_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
	return 0;
}

static void unregister_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
}

asmlinkage void sha256_transform_avx(struct sha256_state *state,
				     const u8 *data, int blocks);

static int sha256_avx_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_avx);
}

static int sha256_avx_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_avx);
}

static int sha256_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_avx_update,
	.final		= sha256_avx_final,
	.finup		= sha256_avx_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-avx",
		.cra_priority	 = 160,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_avx_update,
	.final		= sha256_avx_final,
	.finup		= sha256_avx_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-avx",
		.cra_priority	 = 160,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha256_avx(void)
{
	if (avx_usable())
		return crypto_register_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
	return 0;
}

static void unregister_sha256_avx(void)
{
	if (avx_usable())
		crypto_unregister_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
}

asmlinkage void sha256_transform_rorx(struct sha256_state *state,
				      const u8 *data, int blocks);
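/*
 * The AVX2 code path is named after the RORX instruction it uses for
 * the SHA-256 rotate operations.  RORX is part of BMI2, which is why
 * avx2_usable() below checks X86_FEATURE_BMI2 in addition to
 * X86_FEATURE_AVX2.
 */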
static int sha256_avx2_update(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_transform_rorx);
}

static int sha256_avx2_finup(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_rorx);
}

static int sha256_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx2_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_avx2_update,
	.final		= sha256_avx2_final,
	.finup		= sha256_avx2_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-avx2",
		.cra_priority	 = 170,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_avx2_update,
	.final		= sha256_avx2_final,
	.finup		= sha256_avx2_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-avx2",
		.cra_priority	 = 170,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
	    boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static int register_sha256_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
	return 0;
}

static void unregister_sha256_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
}

#ifdef CONFIG_AS_SHA256_NI
asmlinkage void sha256_ni_transform(struct sha256_state *digest,
				    const u8 *data, int rounds);

static int sha256_ni_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	return _sha256_update(desc, data, len, sha256_ni_transform);
}

static int sha256_ni_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_ni_transform);
}

static int sha256_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_ni_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_ni_update,
	.final		= sha256_ni_final,
	.finup		= sha256_ni_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-ni",
		.cra_priority	 = 250,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_ni_update,
	.final		= sha256_ni_final,
	.finup		= sha256_ni_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-ni",
		.cra_priority	 = 250,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static int register_sha256_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shashes(sha256_ni_algs,
				ARRAY_SIZE(sha256_ni_algs));
	return 0;
}
static void unregister_sha256_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shashes(sha256_ni_algs,
				ARRAY_SIZE(sha256_ni_algs));
}

#else
static inline int register_sha256_ni(void) { return 0; }
static inline void unregister_sha256_ni(void) { }
#endif

static int __init sha256_ssse3_mod_init(void)
{
	if (register_sha256_ssse3())
		goto fail;

	if (register_sha256_avx()) {
		unregister_sha256_ssse3();
		goto fail;
	}

	if (register_sha256_avx2()) {
		unregister_sha256_avx();
		unregister_sha256_ssse3();
		goto fail;
	}

	if (register_sha256_ni()) {
		unregister_sha256_avx2();
		unregister_sha256_avx();
		unregister_sha256_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha256_ssse3_mod_fini(void)
{
	unregister_sha256_ni();
	unregister_sha256_avx2();
	unregister_sha256_avx();
	unregister_sha256_ssse3();
}

module_init(sha256_ssse3_mod_init);
module_exit(sha256_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-ssse3");
MODULE_ALIAS_CRYPTO("sha256-avx");
MODULE_ALIAS_CRYPTO("sha256-avx2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-ssse3");
MODULE_ALIAS_CRYPTO("sha224-avx");
MODULE_ALIAS_CRYPTO("sha224-avx2");
#ifdef CONFIG_AS_SHA256_NI
MODULE_ALIAS_CRYPTO("sha256-ni");
MODULE_ALIAS_CRYPTO("sha224-ni");
#endif
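/*
 * Usage sketch (illustrative only, not part of this module): kernel
 * users do not call the ssse3/avx/avx2/ni drivers directly.  They ask
 * the crypto API for "sha256" and the core selects the registered
 * implementation with the highest cra_priority.  With caller-supplied
 * data/data_len, a one-shot digest looks roughly like:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	if (!IS_ERR(tfm)) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, data, data_len, digest);
 *		crypto_free_shash(tfm);
 *	}
 */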