// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * x64 SIMD accelerated ChaCha and XChaCha stream ciphers,
 * including ChaCha20 (RFC7539)
 *
 * Copyright (C) 2015 Martin Willi
 */

#include <crypto/algapi.h>
#include <crypto/internal/chacha.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/simd.h>

asmlinkage void chacha_block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
					unsigned int len, int nrounds);
asmlinkage void hchacha_block_ssse3(const u32 *state, u32 *out, int nrounds);

asmlinkage void chacha_2block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_8block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);

asmlinkage void chacha_2block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);
asmlinkage void chacha_8block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_simd);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx2);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx512vl);

/*
 * Number of whole blocks covered by len, capped at maxblocks; used to
 * advance the block counter after a SIMD kernel has consumed a partial
 * tail.
 */
static unsigned int chacha_advance(unsigned int len, unsigned int maxblocks)
{
	len = min(len, maxblocks * CHACHA_BLOCK_SIZE);
	return round_up(len, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE;
}

static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
			  unsigned int bytes, int nrounds)
{
	if (IS_ENABLED(CONFIG_AS_AVX512) &&
	    static_branch_likely(&chacha_use_avx512vl)) {
		while (bytes >= CHACHA_BLOCK_SIZE * 8) {
			chacha_8block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			bytes -= CHACHA_BLOCK_SIZE * 8;
			src += CHACHA_BLOCK_SIZE * 8;
			dst += CHACHA_BLOCK_SIZE * 8;
			state[12] += 8;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 4) {
			chacha_8block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 8);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 2) {
			chacha_4block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 4);
			return;
		}
		if (bytes) {
			chacha_2block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 2);
			return;
		}
	}

	if (static_branch_likely(&chacha_use_avx2)) {
		while (bytes >= CHACHA_BLOCK_SIZE * 8) {
			chacha_8block_xor_avx2(state, dst, src, bytes, nrounds);
			bytes -= CHACHA_BLOCK_SIZE * 8;
			src += CHACHA_BLOCK_SIZE * 8;
			dst += CHACHA_BLOCK_SIZE * 8;
			state[12] += 8;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 4) {
			chacha_8block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 8);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 2) {
			chacha_4block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 4);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE) {
			chacha_2block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 2);
			return;
		}
	}

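	/*
	 * SSSE3 fallback. As in the wider paths above, full 4-block groups
	 * are processed in bulk and a partial tail is handed to a kernel
	 * that copes with short input; chacha_advance() then rounds the
	 * tail up to whole blocks so the counter in state[12] stays
	 * consistent. For example, a 202-byte tail (3 blocks + 10 bytes)
	 * advances the counter by chacha_advance(202, 4) == 4.
	 */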
	while (bytes >= CHACHA_BLOCK_SIZE * 4) {
		chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds);
		bytes -= CHACHA_BLOCK_SIZE * 4;
		src += CHACHA_BLOCK_SIZE * 4;
		dst += CHACHA_BLOCK_SIZE * 4;
		state[12] += 4;
	}
	if (bytes > CHACHA_BLOCK_SIZE) {
		chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds);
		state[12] += chacha_advance(bytes, 4);
		return;
	}
	if (bytes) {
		chacha_block_xor_ssse3(state, dst, src, bytes, nrounds);
		state[12]++;
	}
}

void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
{
	if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable()) {
		hchacha_block_generic(state, stream, nrounds);
	} else {
		kernel_fpu_begin();
		hchacha_block_ssse3(state, stream, nrounds);
		kernel_fpu_end();
	}
}
EXPORT_SYMBOL(hchacha_block_arch);

void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
{
	chacha_init_generic(state, key, iv);
}
EXPORT_SYMBOL(chacha_init_arch);

void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
		       int nrounds)
{
	if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable() ||
	    bytes <= CHACHA_BLOCK_SIZE)
		return chacha_crypt_generic(state, dst, src, bytes, nrounds);

	do {
		/*
		 * Limit each kernel_fpu_begin()/kernel_fpu_end() section to
		 * at most SZ_4K of data to keep preemption latency bounded.
		 */
		unsigned int todo = min_t(unsigned int, bytes, SZ_4K);

		kernel_fpu_begin();
		chacha_dosimd(state, dst, src, todo, nrounds);
		kernel_fpu_end();

		bytes -= todo;
		src += todo;
		dst += todo;
	} while (bytes);
}
EXPORT_SYMBOL(chacha_crypt_arch);

static int chacha_simd_stream_xor(struct skcipher_request *req,
				  const struct chacha_ctx *ctx, const u8 *iv)
{
	u32 state[CHACHA_STATE_WORDS] __aligned(8);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	chacha_init_generic(state, ctx->key, iv);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		if (!static_branch_likely(&chacha_use_simd) ||
		    !crypto_simd_usable()) {
			chacha_crypt_generic(state, walk.dst.virt.addr,
					     walk.src.virt.addr, nbytes,
					     ctx->nrounds);
		} else {
			kernel_fpu_begin();
			chacha_dosimd(state, walk.dst.virt.addr,
				      walk.src.virt.addr, nbytes,
				      ctx->nrounds);
			kernel_fpu_end();
		}
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	return err;
}

static int chacha_simd(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);

	return chacha_simd_stream_xor(req, ctx, req->iv);
}

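/*
 * XChaCha: an HChaCha block mixes the key with the first 16 bytes of the
 * 32-byte extended IV to derive a one-time subkey; the remaining IV bytes
 * are then rearranged into the 16-byte IV layout that
 * chacha_simd_stream_xor() expects (64-bit stream position first, then the
 * last 64 nonce bits).
 */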
static int xchacha_simd(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 state[CHACHA_STATE_WORDS] __aligned(8);
	struct chacha_ctx subctx;
	u8 real_iv[16];

	chacha_init_generic(state, ctx->key, req->iv);

	if (req->cryptlen > CHACHA_BLOCK_SIZE && crypto_simd_usable()) {
		kernel_fpu_begin();
		hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
		kernel_fpu_end();
	} else {
		hchacha_block_generic(state, subctx.key, ctx->nrounds);
	}
	subctx.nrounds = ctx->nrounds;

	memcpy(&real_iv[0], req->iv + 24, 8);	/* stream position */
	memcpy(&real_iv[8], req->iv + 16, 8);	/* remaining 64 nonce bits */
	return chacha_simd_stream_xor(req, &subctx, real_iv);
}

static struct skcipher_alg algs[] = {
	{
		.base.cra_name		= "chacha20",
		.base.cra_driver_name	= "chacha20-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= CHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		.encrypt		= chacha_simd,
		.decrypt		= chacha_simd,
	}, {
		.base.cra_name		= "xchacha20",
		.base.cra_driver_name	= "xchacha20-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		.encrypt		= xchacha_simd,
		.decrypt		= xchacha_simd,
	}, {
		.base.cra_name		= "xchacha12",
		.base.cra_driver_name	= "xchacha12-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha12_setkey,
		.encrypt		= xchacha_simd,
		.decrypt		= xchacha_simd,
	},
};

static int __init chacha_simd_mod_init(void)
{
	if (!boot_cpu_has(X86_FEATURE_SSSE3))
		return 0;

	static_branch_enable(&chacha_use_simd);

	if (boot_cpu_has(X86_FEATURE_AVX) &&
	    boot_cpu_has(X86_FEATURE_AVX2) &&
	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		static_branch_enable(&chacha_use_avx2);

		if (IS_ENABLED(CONFIG_AS_AVX512) &&
		    boot_cpu_has(X86_FEATURE_AVX512VL) &&
		    boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
			static_branch_enable(&chacha_use_avx512vl);
	}
	return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
		crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
}

static void __exit chacha_simd_mod_fini(void)
{
	if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) && boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
}

module_init(chacha_simd_mod_init);
module_exit(chacha_simd_mod_fini);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (x64 SIMD accelerated)");
MODULE_ALIAS_CRYPTO("chacha20");
MODULE_ALIAS_CRYPTO("chacha20-simd");
MODULE_ALIAS_CRYPTO("xchacha20");
MODULE_ALIAS_CRYPTO("xchacha20-simd");
MODULE_ALIAS_CRYPTO("xchacha12");
MODULE_ALIAS_CRYPTO("xchacha12-simd");
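/*
 * Illustrative only, not part of this module: a minimal sketch of how a
 * caller might use the library interface exported above, assuming it
 * already holds a CHACHA_KEY_SIZE key (as u32 words) and a CHACHA_IV_SIZE
 * IV:
 *
 *	u32 state[CHACHA_STATE_WORDS];
 *
 *	chacha_init_arch(state, key, iv);
 *	chacha_crypt_arch(state, dst, src, len, 20);	// 20 rounds: ChaCha20
 */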