// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with ARMv8 vmull.p64 instructions.
 *
 * Copyright (C) 2015 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/b128ops.h>
#include <crypto/cryptd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/gf128mul.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH hash function using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

struct ghash_key {
	u64	h[2];
	u64	h2[2];
	u64	h3[2];
	u64	h4[2];

	be128	k;
};

struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       struct ghash_key const *k,
				       const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      struct ghash_key const *k,
				      const char *head);

static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src,
				  struct ghash_key const *k,
				  const char *head);

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
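
/*
 * Hash @blocks full blocks from @src into the running digest @dg, preceded
 * by the optional block at @head. The NEON routine may only run when
 * crypto_simd_usable() reports that the FP/SIMD register file is available;
 * otherwise, fall back to the generic gf128mul_lle() multiply using the
 * unmodified key copy in @key->k.
 */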
static void ghash_do_update(int blocks, u64 dg[], const char *src,
			    struct ghash_key *key, const char *head)
{
	if (likely(crypto_simd_usable())) {
		kernel_neon_begin();
		pmull_ghash_update(blocks, dg, src, key, head);
		kernel_neon_end();
	} else {
		be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

		do {
			const u8 *in = src;

			if (head) {
				in = head;
				blocks++;
				head = NULL;
			} else {
				src += GHASH_BLOCK_SIZE;
			}

			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
			gf128mul_lle(&dst, &key->k);
		} while (--blocks);

		dg[0] = be64_to_cpu(dst.b);
		dg[1] = be64_to_cpu(dst.a);
	}
}

static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		ghash_do_update(blocks, ctx->digest, src, key,
				partial ? ctx->buf : NULL);
		src += blocks * GHASH_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL);
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
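
/*
 * The PMULL code works on a shifted representation of the hash key:
 * ghash_reflect() shifts the 128-bit key left by one bit (effectively
 * multiplying it by x in GF(2^128)) and folds a carried-out bit back in
 * via 0xc200000000000000, the top half of the bit-reflected GHASH
 * reduction polynomial x^128 + x^7 + x^2 + x + 1. ghash_setkey() applies
 * this to the powers h, h^2, h^3 and h^4 of the hash key so the p64 code
 * can process multiple blocks per reduction step.
 */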
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) >> 63;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}

static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);
	be128 h;

	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* needed for the fallback */
	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);
	ghash_reflect(key->h, &key->k);

	h = key->k;
	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h2, &h);

	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h3, &h);

	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h4, &h);

	return 0;
}

static struct shash_alg ghash_alg = {
	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update,
	.final			= ghash_final,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),

	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-ce-sync",
	.base.cra_priority	= 300 - 1,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key),
	.base.cra_module	= THIS_MODULE,
};
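
/*
 * The "ghash-ce" ahash below wraps the synchronous shash above via cryptd.
 * If the NEON unit is not usable when a request comes in, or if we are in
 * atomic context while cryptd already has requests queued (which must not
 * be overtaken), the request is deferred to cryptd and completed later in
 * task context. Otherwise the synchronous shash is invoked directly.
 */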
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	return crypto_shash_init(desc);
}

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return crypto_shash_final(desc, req->result);
	}
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		return shash_ahash_digest(req, desc);
	}
}

static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);

	return crypto_shash_import(desc, in);
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	return crypto_shash_export(desc, out);
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(child, key, keylen);
	crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
			       & CRYPTO_TFM_RES_MASK);

	return err;
}

static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("ghash-ce-sync", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
	.init			= ghash_async_init,
	.update			= ghash_async_update,
	.final			= ghash_async_final,
	.setkey			= ghash_async_setkey,
	.digest			= ghash_async_digest,
	.import			= ghash_async_import,
	.export			= ghash_async_export,
	.halg.digestsize	= GHASH_DIGEST_SIZE,
	.halg.statesize		= sizeof(struct ghash_desc_ctx),
	.halg.base		= {
		.cra_name	= "ghash",
		.cra_driver_name = "ghash-ce",
		.cra_priority	= 300,
		.cra_flags	= CRYPTO_ALG_ASYNC,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_async_ctx),
		.cra_module	= THIS_MODULE,
		.cra_init	= ghash_async_init_tfm,
		.cra_exit	= ghash_async_exit_tfm,
	},
};

static int __init ghash_ce_mod_init(void)
{
	int err;

	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	if (elf_hwcap2 & HWCAP2_PMULL)
		pmull_ghash_update = pmull_ghash_update_p64;
	else
		pmull_ghash_update = pmull_ghash_update_p8;

	err = crypto_register_shash(&ghash_alg);
	if (err)
		return err;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
	return err;
}

static void __exit ghash_ce_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);
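
/*
 * Hypothetical usage sketch (not part of this driver): in-kernel users
 * normally reach these algorithms through the generic ahash API, e.g.
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("ghash", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_ahash_setkey(tfm, key, GHASH_BLOCK_SIZE);
 *		...
 *		crypto_free_ahash(tfm);
 *	}
 *
 * Since "ghash-ce" registers at priority 300 and "ghash-ce-sync" at 299,
 * the cryptd-backed async wrapper is selected by default; both entries
 * can be inspected in /proc/crypto once the module is loaded.
 */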