// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Poly1305 authenticator algorithm, RFC7539, SIMD glue code
 *
 * Copyright (C) 2015 Martin Willi
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/poly1305.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/simd.h>

struct poly1305_simd_desc_ctx {
	struct poly1305_desc_ctx base;
	/* derived key u set? */
	bool uset;
#ifdef CONFIG_AS_AVX2
	/* derived keys r^3, r^4 set? */
	bool wset;
#endif
	/* derived Poly1305 key r^2 */
	u32 u[5];
	/* ... silently appended r^3 and r^4 when using AVX2 */
};

asmlinkage void poly1305_block_sse2(u32 *h, const u8 *src,
				    const u32 *r, unsigned int blocks);
asmlinkage void poly1305_2block_sse2(u32 *h, const u8 *src, const u32 *r,
				     unsigned int blocks, const u32 *u);
#ifdef CONFIG_AS_AVX2
asmlinkage void poly1305_4block_avx2(u32 *h, const u8 *src, const u32 *r,
				     unsigned int blocks, const u32 *u);
static bool poly1305_use_avx2;
#endif

static int poly1305_simd_init(struct shash_desc *desc)
{
	struct poly1305_simd_desc_ctx *sctx = shash_desc_ctx(desc);

	sctx->uset = false;
#ifdef CONFIG_AS_AVX2
	sctx->wset = false;
#endif

	return crypto_poly1305_init(desc);
}

static void poly1305_simd_mult(u32 *a, const u32 *b)
{
	u8 m[POLY1305_BLOCK_SIZE];

	memset(m, 0, sizeof(m));
	/* The poly1305 block function adds a hi-bit to the accumulator which
	 * we don't need for key multiplication; compensate for it. */
	a[4] -= 1 << 24;
	poly1305_block_sse2(a, m, b, 1);
}

static unsigned int poly1305_simd_blocks(struct poly1305_desc_ctx *dctx,
					 const u8 *src, unsigned int srclen)
{
	struct poly1305_simd_desc_ctx *sctx;
	unsigned int blocks, datalen;

	BUILD_BUG_ON(offsetof(struct poly1305_simd_desc_ctx, base));
	sctx = container_of(dctx, struct poly1305_simd_desc_ctx, base);

	if (unlikely(!dctx->sset)) {
		datalen = crypto_poly1305_setdesckey(dctx, src, srclen);
		src += srclen - datalen;
		srclen = datalen;
	}

#ifdef CONFIG_AS_AVX2
	if (poly1305_use_avx2 && srclen >= POLY1305_BLOCK_SIZE * 4) {
		if (unlikely(!sctx->wset)) {
			if (!sctx->uset) {
				memcpy(sctx->u, dctx->r.r, sizeof(sctx->u));
				poly1305_simd_mult(sctx->u, dctx->r.r);
				sctx->uset = true;
			}
			memcpy(sctx->u + 5, sctx->u, sizeof(sctx->u));
			poly1305_simd_mult(sctx->u + 5, dctx->r.r);
			memcpy(sctx->u + 10, sctx->u + 5, sizeof(sctx->u));
			poly1305_simd_mult(sctx->u + 10, dctx->r.r);
			sctx->wset = true;
		}
		blocks = srclen / (POLY1305_BLOCK_SIZE * 4);
		poly1305_4block_avx2(dctx->h.h, src, dctx->r.r, blocks,
				     sctx->u);
		src += POLY1305_BLOCK_SIZE * 4 * blocks;
		srclen -= POLY1305_BLOCK_SIZE * 4 * blocks;
	}
#endif
	if (likely(srclen >= POLY1305_BLOCK_SIZE * 2)) {
		if (unlikely(!sctx->uset)) {
			memcpy(sctx->u, dctx->r.r, sizeof(sctx->u));
			poly1305_simd_mult(sctx->u, dctx->r.r);
			sctx->uset = true;
		}
		blocks = srclen / (POLY1305_BLOCK_SIZE * 2);
		poly1305_2block_sse2(dctx->h.h, src, dctx->r.r, blocks,
				     sctx->u);
		src += POLY1305_BLOCK_SIZE * 2 * blocks;
		srclen -= POLY1305_BLOCK_SIZE * 2 * blocks;
	}
	if (srclen >= POLY1305_BLOCK_SIZE) {
		poly1305_block_sse2(dctx->h.h, src, dctx->r.r, 1);
		srclen -= POLY1305_BLOCK_SIZE;
	}
	return srclen;
}

static int poly1305_simd_update(struct shash_desc *desc,
				const u8 *src, unsigned int srclen)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
	unsigned int bytes;

	/* kernel_fpu_begin/end is costly, use fallback for small updates */
	if (srclen <= 288 || !crypto_simd_usable())
		return crypto_poly1305_update(desc, src, srclen);

	kernel_fpu_begin();

	if (unlikely(dctx->buflen)) {
		bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
		memcpy(dctx->buf + dctx->buflen, src, bytes);
		src += bytes;
		srclen -= bytes;
		dctx->buflen += bytes;

		if (dctx->buflen == POLY1305_BLOCK_SIZE) {
			poly1305_simd_blocks(dctx, dctx->buf,
					     POLY1305_BLOCK_SIZE);
			dctx->buflen = 0;
		}
	}

	if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
		bytes = poly1305_simd_blocks(dctx, src, srclen);
		src += srclen - bytes;
		srclen = bytes;
	}

	kernel_fpu_end();

	if (unlikely(srclen)) {
		dctx->buflen = srclen;
		memcpy(dctx->buf, src, srclen);
	}

	return 0;
}

static struct shash_alg alg = {
	.digestsize	= POLY1305_DIGEST_SIZE,
	.init		= poly1305_simd_init,
	.update		= poly1305_simd_update,
	.final		= crypto_poly1305_final,
	.descsize	= sizeof(struct poly1305_simd_desc_ctx),
	.base		= {
		.cra_name		= "poly1305",
		.cra_driver_name	= "poly1305-simd",
		.cra_priority		= 300,
		.cra_blocksize		= POLY1305_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	},
};

static int __init poly1305_simd_mod_init(void)
{
	if (!boot_cpu_has(X86_FEATURE_XMM2))
		return -ENODEV;

#ifdef CONFIG_AS_AVX2
	poly1305_use_avx2 = boot_cpu_has(X86_FEATURE_AVX) &&
			    boot_cpu_has(X86_FEATURE_AVX2) &&
			    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL);
	alg.descsize = sizeof(struct poly1305_simd_desc_ctx);
	if (poly1305_use_avx2)
		alg.descsize += 10 * sizeof(u32);
#endif
	return crypto_register_shash(&alg);
}

static void __exit poly1305_simd_mod_exit(void)
{
	crypto_unregister_shash(&alg);
}

module_init(poly1305_simd_mod_init);
module_exit(poly1305_simd_mod_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("Poly1305 authenticator");
MODULE_ALIAS_CRYPTO("poly1305");
MODULE_ALIAS_CRYPTO("poly1305-simd");