/*
 * Poly1305 authenticator algorithm, RFC7539, SIMD glue code
 *
 * Copyright (C) 2015 Martin Willi
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/poly1305.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/fpu/api.h>
#include <asm/simd.h>

struct poly1305_simd_desc_ctx {
	struct poly1305_desc_ctx base;
	/* derived key u set? */
	bool uset;
#ifdef CONFIG_AS_AVX2
	/* derived keys r^3, r^4 set? */
	bool wset;
#endif
	/* derived Poly1305 key r^2 */
	u32 u[5];
	/* ... silently appended r^3 and r^4 when using AVX2 */
};

asmlinkage void poly1305_block_sse2(u32 *h, const u8 *src,
				    const u32 *r, unsigned int blocks);
asmlinkage void poly1305_2block_sse2(u32 *h, const u8 *src, const u32 *r,
				     unsigned int blocks, const u32 *u);
#ifdef CONFIG_AS_AVX2
asmlinkage void poly1305_4block_avx2(u32 *h, const u8 *src, const u32 *r,
				     unsigned int blocks, const u32 *u);
static bool poly1305_use_avx2;
#endif

static int poly1305_simd_init(struct shash_desc *desc)
{
	struct poly1305_simd_desc_ctx *sctx = shash_desc_ctx(desc);

	sctx->uset = false;
#ifdef CONFIG_AS_AVX2
	sctx->wset = false;
#endif

	return crypto_poly1305_init(desc);
}

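/*
 * Multiply the 130-bit value in a[] (five 26-bit limbs) by the key b[]
 * modulo 2^130 - 5 by pushing a single all-zero block through the SSE2
 * block function. This is used to derive the key powers r^2 and, on the
 * AVX2 path, r^3 and r^4.
 */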
static void poly1305_simd_mult(u32 *a, const u32 *b)
{
	u8 m[POLY1305_BLOCK_SIZE];

	memset(m, 0, sizeof(m));
	/* The poly1305 block function adds a hi-bit to the accumulator which
	 * we don't need for key multiplication; compensate for it. */
	a[4] -= 1 << 24;
	poly1305_block_sse2(a, m, b, 1);
}

static unsigned int poly1305_simd_blocks(struct poly1305_desc_ctx *dctx,
					 const u8 *src, unsigned int srclen)
{
	struct poly1305_simd_desc_ctx *sctx;
	unsigned int blocks, datalen;

	BUILD_BUG_ON(offsetof(struct poly1305_simd_desc_ctx, base));
	sctx = container_of(dctx, struct poly1305_simd_desc_ctx, base);

	if (unlikely(!dctx->sset)) {
		datalen = crypto_poly1305_setdesckey(dctx, src, srclen);
		src += srclen - datalen;
		srclen = datalen;
	}

#ifdef CONFIG_AS_AVX2
	if (poly1305_use_avx2 && srclen >= POLY1305_BLOCK_SIZE * 4) {
		if (unlikely(!sctx->wset)) {
			if (!sctx->uset) {
				memcpy(sctx->u, dctx->r, sizeof(sctx->u));
				poly1305_simd_mult(sctx->u, dctx->r);
				sctx->uset = true;
			}
			memcpy(sctx->u + 5, sctx->u, sizeof(sctx->u));
			poly1305_simd_mult(sctx->u + 5, dctx->r);
			memcpy(sctx->u + 10, sctx->u + 5, sizeof(sctx->u));
			poly1305_simd_mult(sctx->u + 10, dctx->r);
			sctx->wset = true;
		}
		blocks = srclen / (POLY1305_BLOCK_SIZE * 4);
		poly1305_4block_avx2(dctx->h, src, dctx->r, blocks, sctx->u);
		src += POLY1305_BLOCK_SIZE * 4 * blocks;
		srclen -= POLY1305_BLOCK_SIZE * 4 * blocks;
	}
#endif
	if (likely(srclen >= POLY1305_BLOCK_SIZE * 2)) {
		if (unlikely(!sctx->uset)) {
			memcpy(sctx->u, dctx->r, sizeof(sctx->u));
			poly1305_simd_mult(sctx->u, dctx->r);
			sctx->uset = true;
		}
		blocks = srclen / (POLY1305_BLOCK_SIZE * 2);
		poly1305_2block_sse2(dctx->h, src, dctx->r, blocks, sctx->u);
		src += POLY1305_BLOCK_SIZE * 2 * blocks;
		srclen -= POLY1305_BLOCK_SIZE * 2 * blocks;
	}
	if (srclen >= POLY1305_BLOCK_SIZE) {
		poly1305_block_sse2(dctx->h, src, dctx->r, 1);
		srclen -= POLY1305_BLOCK_SIZE;
	}
	return srclen;
}

static int poly1305_simd_update(struct shash_desc *desc,
				const u8 *src, unsigned int srclen)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
	unsigned int bytes;

	/* kernel_fpu_begin/end is costly, use fallback for small updates */
	if (srclen <= 288 || !may_use_simd())
		return crypto_poly1305_update(desc, src, srclen);

	kernel_fpu_begin();

	if (unlikely(dctx->buflen)) {
		bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
		memcpy(dctx->buf + dctx->buflen, src, bytes);
		src += bytes;
		srclen -= bytes;
		dctx->buflen += bytes;

		if (dctx->buflen == POLY1305_BLOCK_SIZE) {
			poly1305_simd_blocks(dctx, dctx->buf,
					     POLY1305_BLOCK_SIZE);
			dctx->buflen = 0;
		}
	}

	if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
		bytes = poly1305_simd_blocks(dctx, src, srclen);
		src += srclen - bytes;
		srclen = bytes;
	}

	kernel_fpu_end();

	if (unlikely(srclen)) {
		dctx->buflen = srclen;
		memcpy(dctx->buf, src, srclen);
	}

	return 0;
}

static struct shash_alg alg = {
	.digestsize	= POLY1305_DIGEST_SIZE,
	.init		= poly1305_simd_init,
	.update		= poly1305_simd_update,
	.final		= crypto_poly1305_final,
	.setkey		= crypto_poly1305_setkey,
	.descsize	= sizeof(struct poly1305_simd_desc_ctx),
	.base		= {
		.cra_name		= "poly1305",
		.cra_driver_name	= "poly1305-simd",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_alignmask		= sizeof(u32) - 1,
		.cra_blocksize		= POLY1305_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	},
};

static int __init poly1305_simd_mod_init(void)
{
	if (!cpu_has_xmm2)
		return -ENODEV;

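	/*
	 * The AVX2 path keeps two additional key powers (r^3 and r^4)
	 * appended after u[] in struct poly1305_simd_desc_ctx, so grow
	 * the descriptor accordingly when that path is usable.
	 */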
#ifdef CONFIG_AS_AVX2
	poly1305_use_avx2 = cpu_has_avx && cpu_has_avx2 &&
			    cpu_has_xfeatures(XSTATE_SSE | XSTATE_YMM, NULL);
	alg.descsize = sizeof(struct poly1305_simd_desc_ctx);
	if (poly1305_use_avx2)
		alg.descsize += 10 * sizeof(u32);
#endif
	return crypto_register_shash(&alg);
}

static void __exit poly1305_simd_mod_exit(void)
{
	crypto_unregister_shash(&alg);
}

module_init(poly1305_simd_mod_init);
module_exit(poly1305_simd_mod_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("Poly1305 authenticator");
MODULE_ALIAS_CRYPTO("poly1305");
MODULE_ALIAS_CRYPTO("poly1305-simd");