// SPDX-License-Identifier: GPL-2.0 OR MIT
/*
 * Copyright (C) 2015-2019 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 */

#include <crypto/internal/blake2s.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/hash.h>

#include <linux/types.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sizes.h>

#include <asm/cpufeature.h>
#include <asm/fpu/api.h>
#include <asm/processor.h>
#include <asm/simd.h>

asmlinkage void blake2s_compress_ssse3(struct blake2s_state *state,
                                       const u8 *block, const size_t nblocks,
                                       const u32 inc);
asmlinkage void blake2s_compress_avx512(struct blake2s_state *state,
                                        const u8 *block, const size_t nblocks,
                                        const u32 inc);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(blake2s_use_ssse3);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(blake2s_use_avx512);

void blake2s_compress_arch(struct blake2s_state *state,
                           const u8 *block, size_t nblocks,
                           const u32 inc)
{
        /* SIMD disables preemption, so relax after processing each page. */
        BUILD_BUG_ON(SZ_4K / BLAKE2S_BLOCK_SIZE < 8);

        if (!static_branch_likely(&blake2s_use_ssse3) || !crypto_simd_usable()) {
                blake2s_compress_generic(state, block, nblocks, inc);
                return;
        }

        do {
                const size_t blocks = min_t(size_t, nblocks,
                                            SZ_4K / BLAKE2S_BLOCK_SIZE);

                kernel_fpu_begin();
                if (IS_ENABLED(CONFIG_AS_AVX512) &&
                    static_branch_likely(&blake2s_use_avx512))
                        blake2s_compress_avx512(state, block, blocks, inc);
                else
                        blake2s_compress_ssse3(state, block, blocks, inc);
                kernel_fpu_end();

                nblocks -= blocks;
                block += blocks * BLAKE2S_BLOCK_SIZE;
        } while (nblocks);
}
EXPORT_SYMBOL(blake2s_compress_arch);

static int crypto_blake2s_setkey(struct crypto_shash *tfm, const u8 *key,
                                 unsigned int keylen)
{
        struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(tfm);

        if (keylen == 0 || keylen > BLAKE2S_KEY_SIZE)
                return -EINVAL;

        memcpy(tctx->key, key, keylen);
        tctx->keylen = keylen;

        return 0;
}

static int crypto_blake2s_init(struct shash_desc *desc)
{
        struct blake2s_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
        struct blake2s_state *state = shash_desc_ctx(desc);
        const int outlen = crypto_shash_digestsize(desc->tfm);

        if (tctx->keylen)
                blake2s_init_key(state, outlen, tctx->key, tctx->keylen);
        else
                blake2s_init(state, outlen);

        return 0;
}

static int crypto_blake2s_update(struct shash_desc *desc, const u8 *in,
                                 unsigned int inlen)
{
        struct blake2s_state *state = shash_desc_ctx(desc);
        const size_t fill = BLAKE2S_BLOCK_SIZE - state->buflen;

        if (unlikely(!inlen))
                return 0;
        if (inlen > fill) {
                memcpy(state->buf + state->buflen, in, fill);
                blake2s_compress_arch(state, state->buf, 1, BLAKE2S_BLOCK_SIZE);
                state->buflen = 0;
                in += fill;
                inlen -= fill;
        }
        if (inlen > BLAKE2S_BLOCK_SIZE) {
                const size_t nblocks = DIV_ROUND_UP(inlen, BLAKE2S_BLOCK_SIZE);
                /* Hash one less (full) block than strictly possible */
                blake2s_compress_arch(state, in, nblocks - 1, BLAKE2S_BLOCK_SIZE);
                in += BLAKE2S_BLOCK_SIZE * (nblocks - 1);
                inlen -= BLAKE2S_BLOCK_SIZE * (nblocks - 1);
        }
        memcpy(state->buf + state->buflen, in, inlen);
        state->buflen += inlen;

        return 0;
}

static int crypto_blake2s_final(struct shash_desc *desc, u8 *out)
{
        struct blake2s_state *state = shash_desc_ctx(desc);

        blake2s_set_lastblock(state);
        memset(state->buf + state->buflen, 0,
               BLAKE2S_BLOCK_SIZE - state->buflen); /* Padding */
        blake2s_compress_arch(state, state->buf, 1, state->buflen);
        cpu_to_le32_array(state->h, ARRAY_SIZE(state->h));
        memcpy(out, state->h, state->outlen);
        memzero_explicit(state, sizeof(*state));

        return 0;
}

static struct shash_alg blake2s_algs[] = {{
        .base.cra_name          = "blake2s-128",
        .base.cra_driver_name   = "blake2s-128-x86",
        .base.cra_flags         = CRYPTO_ALG_OPTIONAL_KEY,
        .base.cra_ctxsize       = sizeof(struct blake2s_tfm_ctx),
        .base.cra_priority      = 200,
        .base.cra_blocksize     = BLAKE2S_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = BLAKE2S_128_HASH_SIZE,
        .setkey                 = crypto_blake2s_setkey,
        .init                   = crypto_blake2s_init,
        .update                 = crypto_blake2s_update,
        .final                  = crypto_blake2s_final,
        .descsize               = sizeof(struct blake2s_state),
}, {
        .base.cra_name          = "blake2s-160",
        .base.cra_driver_name   = "blake2s-160-x86",
        .base.cra_flags         = CRYPTO_ALG_OPTIONAL_KEY,
        .base.cra_ctxsize       = sizeof(struct blake2s_tfm_ctx),
        .base.cra_priority      = 200,
        .base.cra_blocksize     = BLAKE2S_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = BLAKE2S_160_HASH_SIZE,
        .setkey                 = crypto_blake2s_setkey,
        .init                   = crypto_blake2s_init,
        .update                 = crypto_blake2s_update,
        .final                  = crypto_blake2s_final,
        .descsize               = sizeof(struct blake2s_state),
}, {
        .base.cra_name          = "blake2s-224",
        .base.cra_driver_name   = "blake2s-224-x86",
        .base.cra_flags         = CRYPTO_ALG_OPTIONAL_KEY,
        .base.cra_ctxsize       = sizeof(struct blake2s_tfm_ctx),
        .base.cra_priority      = 200,
        .base.cra_blocksize     = BLAKE2S_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = BLAKE2S_224_HASH_SIZE,
        .setkey                 = crypto_blake2s_setkey,
        .init                   = crypto_blake2s_init,
        .update                 = crypto_blake2s_update,
        .final                  = crypto_blake2s_final,
        .descsize               = sizeof(struct blake2s_state),
}, {
        .base.cra_name          = "blake2s-256",
        .base.cra_driver_name   = "blake2s-256-x86",
        .base.cra_flags         = CRYPTO_ALG_OPTIONAL_KEY,
        .base.cra_ctxsize       = sizeof(struct blake2s_tfm_ctx),
        .base.cra_priority      = 200,
        .base.cra_blocksize     = BLAKE2S_BLOCK_SIZE,
        .base.cra_module        = THIS_MODULE,

        .digestsize             = BLAKE2S_256_HASH_SIZE,
        .setkey                 = crypto_blake2s_setkey,
        .init                   = crypto_blake2s_init,
        .update                 = crypto_blake2s_update,
        .final                  = crypto_blake2s_final,
        .descsize               = sizeof(struct blake2s_state),
}};

static int __init blake2s_mod_init(void)
{
        if (!boot_cpu_has(X86_FEATURE_SSSE3))
                return 0;

        static_branch_enable(&blake2s_use_ssse3);

        if (IS_ENABLED(CONFIG_AS_AVX512) &&
            boot_cpu_has(X86_FEATURE_AVX) &&
            boot_cpu_has(X86_FEATURE_AVX2) &&
            boot_cpu_has(X86_FEATURE_AVX512F) &&
            boot_cpu_has(X86_FEATURE_AVX512VL) &&
            cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM |
                              XFEATURE_MASK_AVX512, NULL))
                static_branch_enable(&blake2s_use_avx512);

        return IS_REACHABLE(CONFIG_CRYPTO_HASH) ?
                crypto_register_shashes(blake2s_algs,
                                        ARRAY_SIZE(blake2s_algs)) : 0;
}

static void __exit blake2s_mod_exit(void)
{
        if (IS_REACHABLE(CONFIG_CRYPTO_HASH) && boot_cpu_has(X86_FEATURE_SSSE3))
                crypto_unregister_shashes(blake2s_algs, ARRAY_SIZE(blake2s_algs));
}

module_init(blake2s_mod_init);
module_exit(blake2s_mod_exit);

MODULE_ALIAS_CRYPTO("blake2s-128");
MODULE_ALIAS_CRYPTO("blake2s-128-x86");
MODULE_ALIAS_CRYPTO("blake2s-160");
MODULE_ALIAS_CRYPTO("blake2s-160-x86");
MODULE_ALIAS_CRYPTO("blake2s-224");
MODULE_ALIAS_CRYPTO("blake2s-224-x86");
MODULE_ALIAS_CRYPTO("blake2s-256");
MODULE_ALIAS_CRYPTO("blake2s-256-x86");
MODULE_LICENSE("GPL v2");
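
/*
 * Illustrative sketch only, not part of the original driver: one way a
 * caller might exercise the "blake2s-256" shash registered above through
 * the generic crypto API. The helper name blake2s_shash_example() is made
 * up for this example; it assumes CONFIG_CRYPTO_HASH=y and trims error
 * handling to the essentials, so it is kept compiled out.
 */
#if 0
static int blake2s_shash_example(const u8 *data, unsigned int len,
                                 u8 out[BLAKE2S_256_HASH_SIZE])
{
        struct crypto_shash *tfm;
        int err;

        /*
         * Binds to the highest-priority "blake2s-256" implementation,
         * i.e. "blake2s-256-x86" (priority 200) when this module is loaded.
         */
        tfm = crypto_alloc_shash("blake2s-256", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        {
                SHASH_DESC_ON_STACK(desc, tfm);

                desc->tfm = tfm;
                /* One-shot digest: init + update + final in a single call. */
                err = crypto_shash_digest(desc, data, len, out);
                shash_desc_zero(desc);
        }

        crypto_free_shash(tfm);
        return err;
}
#endif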