/*
 * Cryptographic API.
 *
 * Glue code for the SHA512 Secure Hash Algorithm assembler
 * implementation using supplemental SSE3 / AVX / AVX2 instructions.
 *
 * This file is based on sha512_generic.c
 *
 * Copyright (C) 2013 Intel Corporation
 * Author: Tim Chen <tim.c.chen@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/string.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha512_base.h>
#include <asm/simd.h>

asmlinkage void sha512_transform_ssse3(struct sha512_state *state,
				       const u8 *data, int blocks);

static int sha512_update(struct shash_desc *desc, const u8 *data,
			 unsigned int len, sha512_block_fn *sha512_xform)
{
	struct sha512_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count[0] % SHA512_BLOCK_SIZE) + len < SHA512_BLOCK_SIZE)
		return crypto_sha512_update(desc, data, len);

	/*
	 * Make sure struct sha512_state begins directly with the SHA512
	 * 512-bit internal state, as this is what the asm functions expect.
	 */
	BUILD_BUG_ON(offsetof(struct sha512_state, state) != 0);

	kernel_fpu_begin();
	sha512_base_do_update(desc, data, len, sha512_xform);
	kernel_fpu_end();

	return 0;
}

static int sha512_finup(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out, sha512_block_fn *sha512_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha512_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha512_base_do_update(desc, data, len, sha512_xform);
	sha512_base_do_finalize(desc, sha512_xform);
	kernel_fpu_end();

	return sha512_base_finish(desc, out);
}

static int sha512_ssse3_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len)
{
	return sha512_update(desc, data, len, sha512_transform_ssse3);
}

static int sha512_ssse3_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha512_finup(desc, data, len, out, sha512_transform_ssse3);
}
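
/*
 * Note: code between kernel_fpu_begin() and kernel_fpu_end() runs with
 * preemption disabled and must not sleep.  The wrappers above therefore
 * fall back to the generic crypto_sha512_update()/_finup() whenever
 * crypto_simd_usable() reports that the FPU cannot be used in the
 * current context (e.g. in hardirq context).
 */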

/* Add padding and return the message digest. */
static int sha512_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha512_ssse3_finup(desc, NULL, 0, out);
}

static struct shash_alg sha512_ssse3_algs[] = { {
	.digestsize	= SHA512_DIGEST_SIZE,
	.init		= sha512_base_init,
	.update		= sha512_ssse3_update,
	.final		= sha512_ssse3_final,
	.finup		= sha512_ssse3_finup,
	.descsize	= sizeof(struct sha512_state),
	.base		= {
		.cra_name	 = "sha512",
		.cra_driver_name = "sha512-ssse3",
		.cra_priority	 = 150,
		.cra_blocksize	 = SHA512_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA384_DIGEST_SIZE,
	.init		= sha384_base_init,
	.update		= sha512_ssse3_update,
	.final		= sha512_ssse3_final,
	.finup		= sha512_ssse3_finup,
	.descsize	= sizeof(struct sha512_state),
	.base		= {
		.cra_name	 = "sha384",
		.cra_driver_name = "sha384-ssse3",
		.cra_priority	 = 150,
		.cra_blocksize	 = SHA384_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static int register_sha512_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shashes(sha512_ssse3_algs,
			ARRAY_SIZE(sha512_ssse3_algs));
	return 0;
}

static void unregister_sha512_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shashes(sha512_ssse3_algs,
			ARRAY_SIZE(sha512_ssse3_algs));
}

#ifdef CONFIG_AS_AVX
asmlinkage void sha512_transform_avx(struct sha512_state *state,
				     const u8 *data, int blocks);
static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int sha512_avx_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha512_update(desc, data, len, sha512_transform_avx);
}

static int sha512_avx_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	return sha512_finup(desc, data, len, out, sha512_transform_avx);
}
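
/*
 * The shash definitions below mirror the SSSE3 ones above; only the
 * block function and the cra_priority differ (150 for SSSE3, 160 for
 * AVX, 170 for AVX2), so the crypto core automatically prefers the
 * fastest variant that registered successfully on this CPU.
 */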

/* Add padding and return the message digest. */
static int sha512_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha512_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha512_avx_algs[] = { {
	.digestsize	= SHA512_DIGEST_SIZE,
	.init		= sha512_base_init,
	.update		= sha512_avx_update,
	.final		= sha512_avx_final,
	.finup		= sha512_avx_finup,
	.descsize	= sizeof(struct sha512_state),
	.base		= {
		.cra_name	 = "sha512",
		.cra_driver_name = "sha512-avx",
		.cra_priority	 = 160,
		.cra_blocksize	 = SHA512_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA384_DIGEST_SIZE,
	.init		= sha384_base_init,
	.update		= sha512_avx_update,
	.final		= sha512_avx_final,
	.finup		= sha512_avx_finup,
	.descsize	= sizeof(struct sha512_state),
	.base		= {
		.cra_name	 = "sha384",
		.cra_driver_name = "sha384-avx",
		.cra_priority	 = 160,
		.cra_blocksize	 = SHA384_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static int register_sha512_avx(void)
{
	if (avx_usable())
		return crypto_register_shashes(sha512_avx_algs,
			ARRAY_SIZE(sha512_avx_algs));
	return 0;
}

static void unregister_sha512_avx(void)
{
	if (avx_usable())
		crypto_unregister_shashes(sha512_avx_algs,
			ARRAY_SIZE(sha512_avx_algs));
}
#else
static inline int register_sha512_avx(void) { return 0; }
static inline void unregister_sha512_avx(void) { }
#endif

#if defined(CONFIG_AS_AVX2) && defined(CONFIG_AS_AVX)
asmlinkage void sha512_transform_rorx(struct sha512_state *state,
				      const u8 *data, int blocks);

static int sha512_avx2_update(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	return sha512_update(desc, data, len, sha512_transform_rorx);
}

static int sha512_avx2_finup(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	return sha512_finup(desc, data, len, out, sha512_transform_rorx);
}
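
/*
 * sha512_transform_rorx is named after the RORX rotate instruction it
 * uses, which is part of BMI2; this is why avx2_usable() below checks
 * X86_FEATURE_BMI2 in addition to X86_FEATURE_AVX2.
 */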

/* Add padding and return the message digest. */
static int sha512_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha512_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha512_avx2_algs[] = { {
	.digestsize	= SHA512_DIGEST_SIZE,
	.init		= sha512_base_init,
	.update		= sha512_avx2_update,
	.final		= sha512_avx2_final,
	.finup		= sha512_avx2_finup,
	.descsize	= sizeof(struct sha512_state),
	.base		= {
		.cra_name	 = "sha512",
		.cra_driver_name = "sha512-avx2",
		.cra_priority	 = 170,
		.cra_blocksize	 = SHA512_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
}, {
	.digestsize	= SHA384_DIGEST_SIZE,
	.init		= sha384_base_init,
	.update		= sha512_avx2_update,
	.final		= sha512_avx2_final,
	.finup		= sha512_avx2_finup,
	.descsize	= sizeof(struct sha512_state),
	.base		= {
		.cra_name	 = "sha384",
		.cra_driver_name = "sha384-avx2",
		.cra_priority	 = 170,
		.cra_blocksize	 = SHA384_BLOCK_SIZE,
		.cra_module	 = THIS_MODULE,
	}
} };

static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2) &&
	    boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static int register_sha512_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shashes(sha512_avx2_algs,
			ARRAY_SIZE(sha512_avx2_algs));
	return 0;
}

static void unregister_sha512_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shashes(sha512_avx2_algs,
			ARRAY_SIZE(sha512_avx2_algs));
}
#else
static inline int register_sha512_avx2(void) { return 0; }
static inline void unregister_sha512_avx2(void) { }
#endif

static int __init sha512_ssse3_mod_init(void)
{
	if (register_sha512_ssse3())
		goto fail;

	if (register_sha512_avx()) {
		unregister_sha512_ssse3();
		goto fail;
	}

	if (register_sha512_avx2()) {
		unregister_sha512_avx();
		unregister_sha512_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha512_ssse3_mod_fini(void)
{
	unregister_sha512_avx2();
	unregister_sha512_avx();
	unregister_sha512_ssse3();
}

module_init(sha512_ssse3_mod_init);
module_exit(sha512_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA512 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha512");
MODULE_ALIAS_CRYPTO("sha512-ssse3");
MODULE_ALIAS_CRYPTO("sha512-avx");
MODULE_ALIAS_CRYPTO("sha512-avx2");
MODULE_ALIAS_CRYPTO("sha384");
MODULE_ALIAS_CRYPTO("sha384-ssse3");
MODULE_ALIAS_CRYPTO("sha384-avx");
MODULE_ALIAS_CRYPTO("sha384-avx2");
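
/*
 * Illustrative only (not part of this module): other kernel code would
 * reach the implementations registered above through the generic shash
 * API rather than calling them directly, e.g. (assuming data/len hold
 * the caller's buffer):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha512", 0, 0);
 *	u8 digest[SHA512_DIGEST_SIZE];
 *
 *	if (!IS_ERR(tfm)) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, data, len, digest);
 *		crypto_free_shash(tfm);
 *	}
 *
 * The highest-priority "sha512" implementation registered on the system
 * (not necessarily one from this file) services the request.
 */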