/*
 * Cryptographic API.
 *
 * Glue code for the SHA256 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha256_generic.c
 *
 * Copyright (C) 2013 Intel Corporation.
 *
 * Author:
 *	Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <asm/fpu/api.h>
#include <linux/string.h>

asmlinkage void sha256_transform_ssse3(u32 *digest, const char *data,
				       u64 rounds);
typedef void (sha256_transform_fn)(u32 *digest, const char *data, u64 rounds);

static int sha256_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len, sha256_transform_fn *sha256_xform)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	/*
	 * Fall back to the generic C implementation when the FPU is not
	 * usable in this context, or when the input still does not
	 * complete a full SHA256 block: the vectorized transform only
	 * operates on whole blocks.
	 */
	if (!irq_fpu_usable() ||
	    (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	/* make sure casting to sha256_block_fn() is safe */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	kernel_fpu_begin();
	sha256_base_do_update(desc, data, len,
			      (sha256_block_fn *)sha256_xform);
	kernel_fpu_end();

	return 0;
}

static int sha256_finup(struct shash_desc *desc, const u8 *data,
	      unsigned int len, u8 *out, sha256_transform_fn *sha256_xform)
{
	if (!irq_fpu_usable())
		return crypto_sha256_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha256_base_do_update(desc, data, len,
				      (sha256_block_fn *)sha256_xform);
	sha256_base_do_finalize(desc, (sha256_block_fn *)sha256_xform);
	kernel_fpu_end();

	return sha256_base_finish(desc, out);
}

static int sha256_ssse3_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	return sha256_update(desc, data, len, sha256_transform_ssse3);
}

static int sha256_ssse3_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_ssse3);
}
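/*
 * Illustrative sketch only (not additional driver code): the routines
 * above are never called directly; they are reached through the generic
 * shash interface once the algorithms below are registered. A typical
 * incremental-hash sequence through this glue, using only names defined
 * in this file, would be:
 *
 *	sha256_base_init(desc);
 *	sha256_ssse3_update(desc, buf, len);	(repeated as data arrives)
 *	sha256_ssse3_final(desc, digest);
 */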
/* Add padding and return the message digest. */
static int sha256_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ssse3_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_ssse3_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_ssse3_update,
	.final		= sha256_ssse3_final,
	.finup		= sha256_ssse3_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-ssse3",
		.cra_priority	 = 150,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_ssse3_update,
	.final		= sha256_ssse3_final,
	.finup		= sha256_ssse3_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-ssse3",
		.cra_priority	 = 150,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static int register_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
	return 0;
}

static void unregister_sha256_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shashes(sha256_ssse3_algs,
				ARRAY_SIZE(sha256_ssse3_algs));
}

#ifdef CONFIG_AS_AVX
asmlinkage void sha256_transform_avx(u32 *digest, const char *data,
				     u64 rounds);

static int sha256_avx_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha256_update(desc, data, len, sha256_transform_avx);
}

static int sha256_avx_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_avx);
}

static int sha256_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_avx_update,
	.final		= sha256_avx_final,
	.finup		= sha256_avx_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-avx",
		.cra_priority	 = 160,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_avx_update,
	.final		= sha256_avx_final,
	.finup		= sha256_avx_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-avx",
		.cra_priority	 = 160,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha256_avx(void)
{
	if (avx_usable())
		return crypto_register_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
	return 0;
}

static void unregister_sha256_avx(void)
{
	if (avx_usable())
		crypto_unregister_shashes(sha256_avx_algs,
				ARRAY_SIZE(sha256_avx_algs));
}

#else
static inline int register_sha256_avx(void) { return 0; }
static inline void unregister_sha256_avx(void) { }
#endif
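/*
 * Note on driver selection: every variant in this file registers the
 * same algorithm names ("sha256" and "sha224") but with an increasing
 * cra_priority (ssse3 = 150, avx = 160, avx2 = 170, sha-ni = 250), so
 * the crypto core transparently prefers the fastest implementation the
 * running CPU supports.
 */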
#if defined(CONFIG_AS_AVX2) && defined(CONFIG_AS_AVX)
asmlinkage void sha256_transform_rorx(u32 *digest, const char *data,
				      u64 rounds);

static int sha256_avx2_update(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	return sha256_update(desc, data, len, sha256_transform_rorx);
}

static int sha256_avx2_finup(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_transform_rorx);
}

static int sha256_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha256_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_avx2_algs[] = { {
	.digestsize	= SHA256_DIGEST_SIZE,
	.init		= sha256_base_init,
	.update		= sha256_avx2_update,
	.final		= sha256_avx2_final,
	.finup		= sha256_avx2_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha256",
		.cra_driver_name = "sha256-avx2",
		.cra_priority	 = 170,
		.cra_blocksize	 = SHA256_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA224_DIGEST_SIZE,
	.init		= sha224_base_init,
	.update		= sha256_avx2_update,
	.final		= sha256_avx2_final,
	.finup		= sha256_avx2_finup,
	.descsize	= sizeof(struct sha256_state),
	.base		= {
		.cra_name	 = "sha224",
		.cra_driver_name = "sha224-avx2",
		.cra_priority	 = 170,
		.cra_blocksize	 = SHA224_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static bool avx2_usable(void)
{
	/*
	 * BMI2 is required in addition to AVX2 because the transform
	 * uses the rorx instruction (hence "sha256_transform_rorx").
	 */
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
	    boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static int register_sha256_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
	return 0;
}

static void unregister_sha256_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shashes(sha256_avx2_algs,
				ARRAY_SIZE(sha256_avx2_algs));
}

#else
static inline int register_sha256_avx2(void) { return 0; }
static inline void unregister_sha256_avx2(void) { }
#endif

#ifdef CONFIG_AS_SHA256_NI
asmlinkage void sha256_ni_transform(u32 *digest, const char *data,
				    u64 rounds);

static int sha256_ni_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	return sha256_update(desc, data, len, sha256_ni_transform);
}

static int sha256_ni_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	return sha256_finup(desc, data, len, out, sha256_ni_transform);
}

static int sha256_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha256_ni_finup(desc, NULL, 0, out);
}
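/*
 * Background note (a sketch of intent, not stated in this file): the
 * sha256_ni_transform assembly is expected to be built on the Intel SHA
 * extensions (sha256rnds2, sha256msg1, sha256msg2), which compute the
 * SHA256 round function directly in hardware; that is why its priority
 * (250) outranks the generic vector implementations above.
 */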
"sha224", 352 .cra_driver_name = "sha224-ni", 353 .cra_priority = 250, 354 .cra_blocksize = SHA224_BLOCK_SIZE, 355 .cra_module = THIS_MODULE, 356 } 357 } }; 358 359 static int register_sha256_ni(void) 360 { 361 if (boot_cpu_has(X86_FEATURE_SHA_NI)) 362 return crypto_register_shashes(sha256_ni_algs, 363 ARRAY_SIZE(sha256_ni_algs)); 364 return 0; 365 } 366 367 static void unregister_sha256_ni(void) 368 { 369 if (boot_cpu_has(X86_FEATURE_SHA_NI)) 370 crypto_unregister_shashes(sha256_ni_algs, 371 ARRAY_SIZE(sha256_ni_algs)); 372 } 373 374 #else 375 static inline int register_sha256_ni(void) { return 0; } 376 static inline void unregister_sha256_ni(void) { } 377 #endif 378 379 static int __init sha256_ssse3_mod_init(void) 380 { 381 if (register_sha256_ssse3()) 382 goto fail; 383 384 if (register_sha256_avx()) { 385 unregister_sha256_ssse3(); 386 goto fail; 387 } 388 389 if (register_sha256_avx2()) { 390 unregister_sha256_avx(); 391 unregister_sha256_ssse3(); 392 goto fail; 393 } 394 395 if (register_sha256_ni()) { 396 unregister_sha256_avx2(); 397 unregister_sha256_avx(); 398 unregister_sha256_ssse3(); 399 goto fail; 400 } 401 402 return 0; 403 fail: 404 return -ENODEV; 405 } 406 407 static void __exit sha256_ssse3_mod_fini(void) 408 { 409 unregister_sha256_ni(); 410 unregister_sha256_avx2(); 411 unregister_sha256_avx(); 412 unregister_sha256_ssse3(); 413 } 414 415 module_init(sha256_ssse3_mod_init); 416 module_exit(sha256_ssse3_mod_fini); 417 418 MODULE_LICENSE("GPL"); 419 MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm, Supplemental SSE3 accelerated"); 420 421 MODULE_ALIAS_CRYPTO("sha256"); 422 MODULE_ALIAS_CRYPTO("sha256-ssse3"); 423 MODULE_ALIAS_CRYPTO("sha256-avx"); 424 MODULE_ALIAS_CRYPTO("sha256-avx2"); 425 MODULE_ALIAS_CRYPTO("sha224"); 426 MODULE_ALIAS_CRYPTO("sha224-ssse3"); 427 MODULE_ALIAS_CRYPTO("sha224-avx"); 428 MODULE_ALIAS_CRYPTO("sha224-avx2"); 429 #ifdef CONFIG_AS_SHA256_NI 430 MODULE_ALIAS_CRYPTO("sha256-ni"); 431 MODULE_ALIAS_CRYPTO("sha224-ni"); 432 #endif 433