// SPDX-License-Identifier: GPL-2.0 OR MIT
/*
 * Copyright (C) 2015-2019 Jason A. Donenfeld <Jason@zx2c4.com>. All Rights Reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/poly1305.h>
#include <crypto/internal/simd.h>
#include <linux/crypto.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sizes.h>
#include <asm/intel-family.h>
#include <asm/simd.h>

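/* Assembly implementations. The ctx argument points at the in-memory state
 * described by struct poly1305_arch_internal below; padbit is the 2^128 bit
 * added to each block (1 for full blocks, 0 once the final partial block has
 * been padded in software).
 */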
asmlinkage void poly1305_init_x86_64(void *ctx,
				     const u8 key[POLY1305_BLOCK_SIZE]);
asmlinkage void poly1305_blocks_x86_64(void *ctx, const u8 *inp,
				       const size_t len, const u32 padbit);
asmlinkage void poly1305_emit_x86_64(void *ctx, u8 mac[POLY1305_DIGEST_SIZE],
				     const u32 nonce[4]);
asmlinkage void poly1305_emit_avx(void *ctx, u8 mac[POLY1305_DIGEST_SIZE],
				  const u32 nonce[4]);
asmlinkage void poly1305_blocks_avx(void *ctx, const u8 *inp, const size_t len,
				    const u32 padbit);
asmlinkage void poly1305_blocks_avx2(void *ctx, const u8 *inp, const size_t len,
				     const u32 padbit);
asmlinkage void poly1305_blocks_avx512(void *ctx, const u8 *inp,
				       const size_t len, const u32 padbit);

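/* These static keys are flipped once at module init, after CPU feature
 * detection, so the per-call dispatch below compiles down to patched branches.
 */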
static __ro_after_init DEFINE_STATIC_KEY_FALSE(poly1305_use_avx);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(poly1305_use_avx2);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(poly1305_use_avx512);

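/* In-memory state shared with the assembly code. The accumulator is kept
 * either as five base 2^26 limbs (h[], with is_base2_26 set, used by the
 * vector code) or as three base 2^64 words (hs[], used by the scalar code);
 * the two views overlap in the union. r[] holds the clamped key, and rn[]
 * carries the key powers laid out for the vectorized block functions.
 */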
struct poly1305_arch_internal {
	union {
		struct {
			u32 h[5];
			u32 is_base2_26;
		};
		u64 hs[3];
	};
	u64 r[2];
	u64 pad;
	struct { u32 r2, r1, r4, r3; } rn[9];
};

/* The AVX code uses base 2^26, while the scalar code uses base 2^64. If we hit
 * the unfortunate situation of using AVX and then having to go back to scalar
 * -- because the user is silly and has called the update function from two
 * separate contexts -- then we need to convert back to the original base before
 * proceeding. It is possible to reason that the initial reduction below is
 * sufficient given the implementation invariants. However, for the avoidance of
 * doubt and because this is not performance critical, we do the full reduction
 * anyway. Z3 proof of below function: https://xn--4db.cc/ltPtHCKN/py
 */
static void convert_to_base2_64(void *ctx)
{
	struct poly1305_arch_internal *state = ctx;
	u32 cy;

	if (!state->is_base2_26)
		return;

	cy = state->h[0] >> 26; state->h[0] &= 0x3ffffff; state->h[1] += cy;
	cy = state->h[1] >> 26; state->h[1] &= 0x3ffffff; state->h[2] += cy;
	cy = state->h[2] >> 26; state->h[2] &= 0x3ffffff; state->h[3] += cy;
	cy = state->h[3] >> 26; state->h[3] &= 0x3ffffff; state->h[4] += cy;
	state->hs[0] = ((u64)state->h[2] << 52) | ((u64)state->h[1] << 26) | state->h[0];
	state->hs[1] = ((u64)state->h[4] << 40) | ((u64)state->h[3] << 14) | (state->h[2] >> 12);
	state->hs[2] = state->h[4] >> 24;
#define ULT(a, b) ((a ^ ((a ^ b) | ((a - b) ^ b))) >> (sizeof(a) * 8 - 1))
	cy = (state->hs[2] >> 2) + (state->hs[2] & ~3ULL);
	state->hs[2] &= 3;
	state->hs[0] += cy;
	state->hs[1] += (cy = ULT(state->hs[0], cy));
	state->hs[2] += ULT(state->hs[1], cy);
#undef ULT
	state->is_base2_26 = 0;
}

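/* Key setup always goes through the scalar code; the vector paths below only
 * differ in how blocks are processed and the tag is emitted.
 */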
static void poly1305_simd_init(void *ctx, const u8 key[POLY1305_BLOCK_SIZE])
{
	poly1305_init_x86_64(ctx, key);
}

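/* Process whole blocks. Falls back to the scalar code when AVX is not
 * enabled, when the FPU cannot be used in this context, or when the input is
 * too short to amortize entering the vector code (unless the state is already
 * in base 2^26).
 */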
static void poly1305_simd_blocks(void *ctx, const u8 *inp, size_t len,
				 const u32 padbit)
{
	struct poly1305_arch_internal *state = ctx;

	/* SIMD disables preemption, so relax after processing each page. */
	BUILD_BUG_ON(SZ_4K < POLY1305_BLOCK_SIZE ||
		     SZ_4K % POLY1305_BLOCK_SIZE);

	if (!static_branch_likely(&poly1305_use_avx) ||
	    (len < (POLY1305_BLOCK_SIZE * 18) && !state->is_base2_26) ||
	    !crypto_simd_usable()) {
		convert_to_base2_64(ctx);
		poly1305_blocks_x86_64(ctx, inp, len, padbit);
		return;
	}

	do {
		const size_t bytes = min_t(size_t, len, SZ_4K);

		kernel_fpu_begin();
		if (IS_ENABLED(CONFIG_AS_AVX512) && static_branch_likely(&poly1305_use_avx512))
			poly1305_blocks_avx512(ctx, inp, bytes, padbit);
		else if (static_branch_likely(&poly1305_use_avx2))
			poly1305_blocks_avx2(ctx, inp, bytes, padbit);
		else
			poly1305_blocks_avx(ctx, inp, bytes, padbit);
		kernel_fpu_end();

		len -= bytes;
		inp += bytes;
	} while (len);
}

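/* Emit the 16-byte tag: add the s half of the key (passed as nonce[]) to the
 * final accumulator value.
 */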
static void poly1305_simd_emit(void *ctx, u8 mac[POLY1305_DIGEST_SIZE],
			       const u32 nonce[4])
{
	if (!static_branch_likely(&poly1305_use_avx))
		poly1305_emit_x86_64(ctx, mac, nonce);
	else
		poly1305_emit_avx(ctx, mac, nonce);
}

void poly1305_init_arch(struct poly1305_desc_ctx *dctx, const u8 key[POLY1305_KEY_SIZE])
{
	poly1305_simd_init(&dctx->h, key);
	dctx->s[0] = get_unaligned_le32(&key[16]);
	dctx->s[1] = get_unaligned_le32(&key[20]);
	dctx->s[2] = get_unaligned_le32(&key[24]);
	dctx->s[3] = get_unaligned_le32(&key[28]);
	dctx->buflen = 0;
	dctx->sset = true;
}
EXPORT_SYMBOL(poly1305_init_arch);

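/* The shash algorithm below has no setkey(): the 32-byte key arrives as the
 * leading bytes of the message. Consume the r half (key setup) and then the
 * s half here, and return how many input bytes were used.
 */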
static unsigned int crypto_poly1305_setdctxkey(struct poly1305_desc_ctx *dctx,
					       const u8 *inp, unsigned int len)
{
	unsigned int acc = 0;

	if (unlikely(!dctx->sset)) {
		if (!dctx->rset && len >= POLY1305_BLOCK_SIZE) {
			poly1305_simd_init(&dctx->h, inp);
			inp += POLY1305_BLOCK_SIZE;
			len -= POLY1305_BLOCK_SIZE;
			acc += POLY1305_BLOCK_SIZE;
			dctx->rset = 1;
		}
		if (len >= POLY1305_BLOCK_SIZE) {
			dctx->s[0] = get_unaligned_le32(&inp[0]);
			dctx->s[1] = get_unaligned_le32(&inp[4]);
			dctx->s[2] = get_unaligned_le32(&inp[8]);
			dctx->s[3] = get_unaligned_le32(&inp[12]);
			acc += POLY1305_BLOCK_SIZE;
			dctx->sset = true;
		}
	}
	return acc;
}

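/* Buffer partial blocks and feed everything else to the SIMD block code in
 * multiples of POLY1305_BLOCK_SIZE. Key material arriving through the shash
 * update path is absorbed by crypto_poly1305_setdctxkey() first.
 */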
void poly1305_update_arch(struct poly1305_desc_ctx *dctx, const u8 *src,
			  unsigned int srclen)
{
	unsigned int bytes, used;

	if (unlikely(dctx->buflen)) {
		bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
		memcpy(dctx->buf + dctx->buflen, src, bytes);
		src += bytes;
		srclen -= bytes;
		dctx->buflen += bytes;

		if (dctx->buflen == POLY1305_BLOCK_SIZE) {
			if (likely(!crypto_poly1305_setdctxkey(dctx, dctx->buf, POLY1305_BLOCK_SIZE)))
				poly1305_simd_blocks(&dctx->h, dctx->buf, POLY1305_BLOCK_SIZE, 1);
			dctx->buflen = 0;
		}
	}

	if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
		bytes = round_down(srclen, POLY1305_BLOCK_SIZE);
		srclen -= bytes;
		used = crypto_poly1305_setdctxkey(dctx, src, bytes);
		if (likely(bytes - used))
			poly1305_simd_blocks(&dctx->h, src + used, bytes - used, 1);
		src += bytes;
	}

	if (unlikely(srclen)) {
		dctx->buflen = srclen;
		memcpy(dctx->buf, src, srclen);
	}
}
EXPORT_SYMBOL(poly1305_update_arch);

void poly1305_final_arch(struct poly1305_desc_ctx *dctx, u8 *dst)
{
	if (unlikely(dctx->buflen)) {
		dctx->buf[dctx->buflen++] = 1;
		memset(dctx->buf + dctx->buflen, 0,
		       POLY1305_BLOCK_SIZE - dctx->buflen);
		poly1305_simd_blocks(&dctx->h, dctx->buf, POLY1305_BLOCK_SIZE, 0);
	}

	poly1305_simd_emit(&dctx->h, dst, dctx->s);
	memzero_explicit(dctx, sizeof(*dctx));
}
EXPORT_SYMBOL(poly1305_final_arch);

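/*
 * Library API usage, as a minimal sketch. Callers normally go through the
 * generic poly1305_init()/poly1305_update()/poly1305_final() wrappers in
 * <crypto/poly1305.h>, which dispatch to these _arch variants when the
 * architecture implementation is enabled:
 *
 *	struct poly1305_desc_ctx desc;
 *	u8 mac[POLY1305_DIGEST_SIZE];
 *
 *	poly1305_init_arch(&desc, key);          // key: POLY1305_KEY_SIZE bytes
 *	poly1305_update_arch(&desc, data, len);
 *	poly1305_final_arch(&desc, mac);         // writes the 16-byte tag
 *
 * The shash glue below exposes the same code to the crypto API as the
 * "poly1305-simd" driver.
 */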
static int crypto_poly1305_init(struct shash_desc *desc)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);

	*dctx = (struct poly1305_desc_ctx){};
	return 0;
}

static int crypto_poly1305_update(struct shash_desc *desc,
				  const u8 *src, unsigned int srclen)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);

	poly1305_update_arch(dctx, src, srclen);
	return 0;
}

static int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
{
	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);

	if (unlikely(!dctx->sset))
		return -ENOKEY;

	poly1305_final_arch(dctx, dst);
	return 0;
}

static struct shash_alg alg = {
	.digestsize	= POLY1305_DIGEST_SIZE,
	.init		= crypto_poly1305_init,
	.update		= crypto_poly1305_update,
	.final		= crypto_poly1305_final,
	.descsize	= sizeof(struct poly1305_desc_ctx),
	.base		= {
		.cra_name		= "poly1305",
		.cra_driver_name	= "poly1305-simd",
		.cra_priority		= 300,
		.cra_blocksize		= POLY1305_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	},
};

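/* Probe CPU features once at load time and flip the matching static keys.
 * AVX-512 is additionally gated on assembler support and on not running on
 * Skylake-X, where heavy zmm use downclocks the core.
 */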
static int __init poly1305_simd_mod_init(void)
{
	if (boot_cpu_has(X86_FEATURE_AVX) &&
	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL))
		static_branch_enable(&poly1305_use_avx);
	if (boot_cpu_has(X86_FEATURE_AVX) && boot_cpu_has(X86_FEATURE_AVX2) &&
	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL))
		static_branch_enable(&poly1305_use_avx2);
	if (IS_ENABLED(CONFIG_AS_AVX512) && boot_cpu_has(X86_FEATURE_AVX) &&
	    boot_cpu_has(X86_FEATURE_AVX2) && boot_cpu_has(X86_FEATURE_AVX512F) &&
	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM | XFEATURE_MASK_AVX512, NULL) &&
	    /* Skylake downclocks unacceptably much when using zmm, but later generations are fast. */
	    boot_cpu_data.x86_model != INTEL_FAM6_SKYLAKE_X)
		static_branch_enable(&poly1305_use_avx512);
	return IS_REACHABLE(CONFIG_CRYPTO_HASH) ? crypto_register_shash(&alg) : 0;
}

static void __exit poly1305_simd_mod_exit(void)
{
	if (IS_REACHABLE(CONFIG_CRYPTO_HASH))
		crypto_unregister_shash(&alg);
}

module_init(poly1305_simd_mod_init);
module_exit(poly1305_simd_mod_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Jason A. Donenfeld <Jason@zx2c4.com>");
MODULE_DESCRIPTION("Poly1305 authenticator");
MODULE_ALIAS_CRYPTO("poly1305");
MODULE_ALIAS_CRYPTO("poly1305-simd");