xref: /openbmc/linux/arch/x86/crypto/aesni-intel_glue.c (revision c900529f3d9161bfde5cca0754f83b4d3c3e0220)
12874c5fdSThomas Gleixner // SPDX-License-Identifier: GPL-2.0-or-later
254b6a1bdSHuang Ying /*
354b6a1bdSHuang Ying  * Support for Intel AES-NI instructions. This file contains the glue
454b6a1bdSHuang Ying  * code; the real AES implementation is in aesni-intel_asm.S.
554b6a1bdSHuang Ying  *
654b6a1bdSHuang Ying  * Copyright (C) 2008, Intel Corp.
754b6a1bdSHuang Ying  *    Author: Huang Ying <ying.huang@intel.com>
854b6a1bdSHuang Ying  *
90bd82f5fSTadeusz Struk  * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
100bd82f5fSTadeusz Struk  * interface for 64-bit kernels.
110bd82f5fSTadeusz Struk  *    Authors: Adrian Hoban <adrian.hoban@intel.com>
120bd82f5fSTadeusz Struk  *             Gabriele Paoloni <gabriele.paoloni@intel.com>
130bd82f5fSTadeusz Struk  *             Tadeusz Struk (tadeusz.struk@intel.com)
140bd82f5fSTadeusz Struk  *             Aidan O'Mahony (aidan.o.mahony@intel.com)
150bd82f5fSTadeusz Struk  *    Copyright (c) 2010, Intel Corporation.
1654b6a1bdSHuang Ying  */
1754b6a1bdSHuang Ying 
1854b6a1bdSHuang Ying #include <linux/hardirq.h>
1954b6a1bdSHuang Ying #include <linux/types.h>
207c52d551SPaul Gortmaker #include <linux/module.h>
2154b6a1bdSHuang Ying #include <linux/err.h>
2254b6a1bdSHuang Ying #include <crypto/algapi.h>
2354b6a1bdSHuang Ying #include <crypto/aes.h>
2412387a46SHuang Ying #include <crypto/ctr.h>
25023af608SJussi Kivilinna #include <crypto/b128ops.h>
2646d93748SCorentin LABBE #include <crypto/gcm.h>
27023af608SJussi Kivilinna #include <crypto/xts.h>
283bd391f0SAndi Kleen #include <asm/cpu_device_id.h>
29f2abe0d7SEric Biggers #include <asm/simd.h>
300bd82f5fSTadeusz Struk #include <crypto/scatterwalk.h>
310bd82f5fSTadeusz Struk #include <crypto/internal/aead.h>
3285671860SHerbert Xu #include <crypto/internal/simd.h>
3385671860SHerbert Xu #include <crypto/internal/skcipher.h>
34d6cbf4eaSArd Biesheuvel #include <linux/jump_label.h>
350bd82f5fSTadeusz Struk #include <linux/workqueue.h>
360bd82f5fSTadeusz Struk #include <linux/spinlock.h>
3764a49b85SArd Biesheuvel #include <linux/static_call.h>
3854b6a1bdSHuang Ying 
39e31ac32dSTimothy McCaffrey 
40b7c89d9eSHerbert Xu #define AESNI_ALIGN	16
4185671860SHerbert Xu #define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
42b7c89d9eSHerbert Xu #define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
43b7c89d9eSHerbert Xu #define RFC4106_HASH_SUBKEY_SIZE 16
4485671860SHerbert Xu #define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
4585671860SHerbert Xu #define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
4685671860SHerbert Xu #define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
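
/*
 * The *_CTX_SIZE values reserve AESNI_ALIGN_EXTRA spare bytes beyond the raw
 * context so that, whatever alignment the crypto API provides
 * (CRYPTO_MINALIGN), the context pointer can still be rounded up to a 16-byte
 * boundary at runtime (see aes_ctx() below). For example, with
 * CRYPTO_MINALIGN == 8, AESNI_ALIGN_EXTRA == 8, which is enough padding to
 * advance an 8-byte-aligned allocation to the next 16-byte boundary.
 */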
47b7c89d9eSHerbert Xu 
480bd82f5fSTadeusz Struk /* This data is stored at the end of the crypto_tfm struct.
490bd82f5fSTadeusz Struk  * It is per-"session" data storage and
500bd82f5fSTadeusz Struk  * must be 16-byte aligned.
510bd82f5fSTadeusz Struk  */
520bd82f5fSTadeusz Struk struct aesni_rfc4106_gcm_ctx {
5385671860SHerbert Xu 	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
5485671860SHerbert Xu 	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
550bd82f5fSTadeusz Struk 	u8 nonce[4];
560bd82f5fSTadeusz Struk };
570bd82f5fSTadeusz Struk 
58cce2ea8dSSabrina Dubroca struct generic_gcmaes_ctx {
59cce2ea8dSSabrina Dubroca 	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
60cce2ea8dSSabrina Dubroca 	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
61cce2ea8dSSabrina Dubroca };
62cce2ea8dSSabrina Dubroca 
63023af608SJussi Kivilinna struct aesni_xts_ctx {
6485671860SHerbert Xu 	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
6585671860SHerbert Xu 	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
66023af608SJussi Kivilinna };
67023af608SJussi Kivilinna 
689ee4a5dfSDave Watson #define GCM_BLOCK_LEN 16
699ee4a5dfSDave Watson 
709ee4a5dfSDave Watson struct gcm_context_data {
719ee4a5dfSDave Watson 	/* init, update and finalize context data */
729ee4a5dfSDave Watson 	u8 aad_hash[GCM_BLOCK_LEN];
739ee4a5dfSDave Watson 	u64 aad_length;
749ee4a5dfSDave Watson 	u64 in_length;
759ee4a5dfSDave Watson 	u8 partial_block_enc_key[GCM_BLOCK_LEN];
769ee4a5dfSDave Watson 	u8 orig_IV[GCM_BLOCK_LEN];
779ee4a5dfSDave Watson 	u8 current_counter[GCM_BLOCK_LEN];
789ee4a5dfSDave Watson 	u64 partial_block_len;
799ee4a5dfSDave Watson 	u64 unused;
80de85fc46SDave Watson 	u8 hash_keys[GCM_BLOCK_LEN * 16];
819ee4a5dfSDave Watson };
829ee4a5dfSDave Watson 
8354b6a1bdSHuang Ying asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
8454b6a1bdSHuang Ying 			     unsigned int key_len);
859c1e8836SKees Cook asmlinkage void aesni_enc(const void *ctx, u8 *out, const u8 *in);
869c1e8836SKees Cook asmlinkage void aesni_dec(const void *ctx, u8 *out, const u8 *in);
8754b6a1bdSHuang Ying asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
8854b6a1bdSHuang Ying 			      const u8 *in, unsigned int len);
8954b6a1bdSHuang Ying asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
9054b6a1bdSHuang Ying 			      const u8 *in, unsigned int len);
9154b6a1bdSHuang Ying asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
9254b6a1bdSHuang Ying 			      const u8 *in, unsigned int len, u8 *iv);
9354b6a1bdSHuang Ying asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
9454b6a1bdSHuang Ying 			      const u8 *in, unsigned int len, u8 *iv);
95ddf169a9SArd Biesheuvel asmlinkage void aesni_cts_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
96ddf169a9SArd Biesheuvel 				  const u8 *in, unsigned int len, u8 *iv);
97ddf169a9SArd Biesheuvel asmlinkage void aesni_cts_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
98ddf169a9SArd Biesheuvel 				  const u8 *in, unsigned int len, u8 *iv);
999bed4acaSRandy Dunlap 
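/*
 * Payload-size thresholds used by the GCM code below: the AVX (gen2) and
 * AVX2 (gen4) assembly paths are only selected when at least this much data
 * is being processed (see the do_avx/do_avx2 checks in gcmaes_crypt_by_sg()).
 */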
100d764593aSTim Chen #define AVX_GEN2_OPTSIZE 640
101d764593aSTim Chen #define AVX_GEN4_OPTSIZE 4096
102d764593aSTim Chen 
10386ad60a6SArd Biesheuvel asmlinkage void aesni_xts_encrypt(const struct crypto_aes_ctx *ctx, u8 *out,
10486ad60a6SArd Biesheuvel 				  const u8 *in, unsigned int len, u8 *iv);
10586ad60a6SArd Biesheuvel 
10686ad60a6SArd Biesheuvel asmlinkage void aesni_xts_decrypt(const struct crypto_aes_ctx *ctx, u8 *out,
10786ad60a6SArd Biesheuvel 				  const u8 *in, unsigned int len, u8 *iv);
10886ad60a6SArd Biesheuvel 
1090d258efbSMathias Krause #ifdef CONFIG_X86_64
11022cddcc7Schandramouli narayanan 
11112387a46SHuang Ying asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
11212387a46SHuang Ying 			      const u8 *in, unsigned int len, u8 *iv);
11364a49b85SArd Biesheuvel DEFINE_STATIC_CALL(aesni_ctr_enc_tfm, aesni_ctr_enc);
11454b6a1bdSHuang Ying 
115fb8986e6SDave Watson /* Scatter / Gather routines, with args similar to above */
116fb8986e6SDave Watson asmlinkage void aesni_gcm_init(void *ctx,
117fb8986e6SDave Watson 			       struct gcm_context_data *gdata,
118fb8986e6SDave Watson 			       u8 *iv,
119fb8986e6SDave Watson 			       u8 *hash_subkey, const u8 *aad,
120fb8986e6SDave Watson 			       unsigned long aad_len);
121fb8986e6SDave Watson asmlinkage void aesni_gcm_enc_update(void *ctx,
122fb8986e6SDave Watson 				     struct gcm_context_data *gdata, u8 *out,
123fb8986e6SDave Watson 				     const u8 *in, unsigned long plaintext_len);
124fb8986e6SDave Watson asmlinkage void aesni_gcm_dec_update(void *ctx,
125fb8986e6SDave Watson 				     struct gcm_context_data *gdata, u8 *out,
126fb8986e6SDave Watson 				     const u8 *in,
127fb8986e6SDave Watson 				     unsigned long ciphertext_len);
128fb8986e6SDave Watson asmlinkage void aesni_gcm_finalize(void *ctx,
129fb8986e6SDave Watson 				   struct gcm_context_data *gdata,
130fb8986e6SDave Watson 				   u8 *auth_tag, unsigned long auth_tag_len);
131d764593aSTim Chen 
13222cddcc7Schandramouli narayanan asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
13322cddcc7Schandramouli narayanan 		void *keys, u8 *out, unsigned int num_bytes);
13422cddcc7Schandramouli narayanan asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
13522cddcc7Schandramouli narayanan 		void *keys, u8 *out, unsigned int num_bytes);
13622cddcc7Schandramouli narayanan asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
13722cddcc7Schandramouli narayanan 		void *keys, u8 *out, unsigned int num_bytes);
138fd94fcf0SNathan Huckleberry 
139fd94fcf0SNathan Huckleberry 
140fd94fcf0SNathan Huckleberry asmlinkage void aes_xctr_enc_128_avx_by8(const u8 *in, const u8 *iv,
141fd94fcf0SNathan Huckleberry 	const void *keys, u8 *out, unsigned int num_bytes,
142fd94fcf0SNathan Huckleberry 	unsigned int byte_ctr);
143fd94fcf0SNathan Huckleberry 
144fd94fcf0SNathan Huckleberry asmlinkage void aes_xctr_enc_192_avx_by8(const u8 *in, const u8 *iv,
145fd94fcf0SNathan Huckleberry 	const void *keys, u8 *out, unsigned int num_bytes,
146fd94fcf0SNathan Huckleberry 	unsigned int byte_ctr);
147fd94fcf0SNathan Huckleberry 
148fd94fcf0SNathan Huckleberry asmlinkage void aes_xctr_enc_256_avx_by8(const u8 *in, const u8 *iv,
149fd94fcf0SNathan Huckleberry 	const void *keys, u8 *out, unsigned int num_bytes,
150fd94fcf0SNathan Huckleberry 	unsigned int byte_ctr);
151fd94fcf0SNathan Huckleberry 
152d764593aSTim Chen /*
153603f8c3bSDave Watson  * asmlinkage void aesni_gcm_init_avx_gen2()
154d764593aSTim Chen  * gcm_data *my_ctx_data - context data
155d764593aSTim Chen  * u8 *hash_subkey - the hash subkey input; data starts on a 16-byte boundary.
156d764593aSTim Chen  */
157603f8c3bSDave Watson asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
158de85fc46SDave Watson 					struct gcm_context_data *gdata,
15938003cd2SDave Watson 					u8 *iv,
160603f8c3bSDave Watson 					u8 *hash_subkey,
16138003cd2SDave Watson 					const u8 *aad,
16238003cd2SDave Watson 					unsigned long aad_len);
163d764593aSTim Chen 
164603f8c3bSDave Watson asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
165603f8c3bSDave Watson 				     struct gcm_context_data *gdata, u8 *out,
166603f8c3bSDave Watson 				     const u8 *in, unsigned long plaintext_len);
167603f8c3bSDave Watson asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
168603f8c3bSDave Watson 				     struct gcm_context_data *gdata, u8 *out,
169603f8c3bSDave Watson 				     const u8 *in,
170603f8c3bSDave Watson 				     unsigned long ciphertext_len);
171603f8c3bSDave Watson asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
172603f8c3bSDave Watson 				   struct gcm_context_data *gdata,
173603f8c3bSDave Watson 				   u8 *auth_tag, unsigned long auth_tag_len);
174603f8c3bSDave Watson 
175d764593aSTim Chen /*
176603f8c3bSDave Watson  * asmlinkage void aesni_gcm_init_avx_gen4()
177d764593aSTim Chen  * gcm_data *my_ctx_data - context data
178d764593aSTim Chen  * u8 *hash_subkey - the hash subkey input; data starts on a 16-byte boundary.
179d764593aSTim Chen  */
180603f8c3bSDave Watson asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
181de85fc46SDave Watson 					struct gcm_context_data *gdata,
18238003cd2SDave Watson 					u8 *iv,
183603f8c3bSDave Watson 					u8 *hash_subkey,
18438003cd2SDave Watson 					const u8 *aad,
18538003cd2SDave Watson 					unsigned long aad_len);
186d764593aSTim Chen 
187603f8c3bSDave Watson asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
188603f8c3bSDave Watson 				     struct gcm_context_data *gdata, u8 *out,
189603f8c3bSDave Watson 				     const u8 *in, unsigned long plaintext_len);
190603f8c3bSDave Watson asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
191603f8c3bSDave Watson 				     struct gcm_context_data *gdata, u8 *out,
192603f8c3bSDave Watson 				     const u8 *in,
193603f8c3bSDave Watson 				     unsigned long ciphertext_len);
194603f8c3bSDave Watson asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
195603f8c3bSDave Watson 				   struct gcm_context_data *gdata,
196603f8c3bSDave Watson 				   u8 *auth_tag, unsigned long auth_tag_len);
197603f8c3bSDave Watson 
198d6cbf4eaSArd Biesheuvel static __ro_after_init DEFINE_STATIC_KEY_FALSE(gcm_use_avx);
199d6cbf4eaSArd Biesheuvel static __ro_after_init DEFINE_STATIC_KEY_FALSE(gcm_use_avx2);
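
/*
 * Runtime selectors for the AVX/AVX2 GCM assembly. These static keys are
 * presumably enabled during module init when the CPU advertises the
 * corresponding features (that init code is outside this excerpt); the GCM
 * helpers below branch on them together with the *_OPTSIZE thresholds above.
 */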
200d764593aSTim Chen 
2010bd82f5fSTadeusz Struk static inline struct
2020bd82f5fSTadeusz Struk aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
2030bd82f5fSTadeusz Struk {
204b7c89d9eSHerbert Xu 	unsigned long align = AESNI_ALIGN;
205b7c89d9eSHerbert Xu 
206b7c89d9eSHerbert Xu 	if (align <= crypto_tfm_ctx_alignment())
207b7c89d9eSHerbert Xu 		align = 1;
208b7c89d9eSHerbert Xu 	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
2090bd82f5fSTadeusz Struk }
210cce2ea8dSSabrina Dubroca 
211cce2ea8dSSabrina Dubroca static inline struct
212cce2ea8dSSabrina Dubroca generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
213cce2ea8dSSabrina Dubroca {
214cce2ea8dSSabrina Dubroca 	unsigned long align = AESNI_ALIGN;
215cce2ea8dSSabrina Dubroca 
216cce2ea8dSSabrina Dubroca 	if (align <= crypto_tfm_ctx_alignment())
217cce2ea8dSSabrina Dubroca 		align = 1;
218cce2ea8dSSabrina Dubroca 	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
219cce2ea8dSSabrina Dubroca }
220559ad0ffSMathias Krause #endif
2210bd82f5fSTadeusz Struk 
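/*
 * Round a raw context pointer up to AESNI_ALIGN. If the crypto API already
 * guarantees at least 16-byte alignment, align is forced to 1 and this is a
 * no-op.
 */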
22254b6a1bdSHuang Ying static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
22354b6a1bdSHuang Ying {
22454b6a1bdSHuang Ying 	unsigned long addr = (unsigned long)raw_ctx;
22554b6a1bdSHuang Ying 	unsigned long align = AESNI_ALIGN;
22654b6a1bdSHuang Ying 
22754b6a1bdSHuang Ying 	if (align <= crypto_tfm_ctx_alignment())
22854b6a1bdSHuang Ying 		align = 1;
22954b6a1bdSHuang Ying 	return (struct crypto_aes_ctx *)ALIGN(addr, align);
23054b6a1bdSHuang Ying }
23154b6a1bdSHuang Ying 
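/*
 * Expand the AES key: reject lengths other than 128/192/256 bits, then use
 * the AES-NI key-schedule routine when the FPU is usable and fall back to the
 * generic aes_expandkey() otherwise.
 */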
232*28b77609SEric Biggers static int aes_set_key_common(struct crypto_aes_ctx *ctx,
23354b6a1bdSHuang Ying 			      const u8 *in_key, unsigned int key_len)
23454b6a1bdSHuang Ying {
23554b6a1bdSHuang Ying 	int err;
23654b6a1bdSHuang Ying 
23754b6a1bdSHuang Ying 	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
238674f368aSEric Biggers 	    key_len != AES_KEYSIZE_256)
23954b6a1bdSHuang Ying 		return -EINVAL;
24054b6a1bdSHuang Ying 
241f2abe0d7SEric Biggers 	if (!crypto_simd_usable())
2422c53fd11SArd Biesheuvel 		err = aes_expandkey(ctx, in_key, key_len);
24354b6a1bdSHuang Ying 	else {
24454b6a1bdSHuang Ying 		kernel_fpu_begin();
24554b6a1bdSHuang Ying 		err = aesni_set_key(ctx, in_key, key_len);
24654b6a1bdSHuang Ying 		kernel_fpu_end();
24754b6a1bdSHuang Ying 	}
24854b6a1bdSHuang Ying 
24954b6a1bdSHuang Ying 	return err;
25054b6a1bdSHuang Ying }
25154b6a1bdSHuang Ying 
25254b6a1bdSHuang Ying static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
25354b6a1bdSHuang Ying 		       unsigned int key_len)
25454b6a1bdSHuang Ying {
255*28b77609SEric Biggers 	return aes_set_key_common(aes_ctx(crypto_tfm_ctx(tfm)), in_key,
256*28b77609SEric Biggers 				  key_len);
25754b6a1bdSHuang Ying }
25854b6a1bdSHuang Ying 
259724ecd3cSArd Biesheuvel static void aesni_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
26054b6a1bdSHuang Ying {
26154b6a1bdSHuang Ying 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
26254b6a1bdSHuang Ying 
2632c53fd11SArd Biesheuvel 	if (!crypto_simd_usable()) {
2642c53fd11SArd Biesheuvel 		aes_encrypt(ctx, dst, src);
2652c53fd11SArd Biesheuvel 	} else {
26654b6a1bdSHuang Ying 		kernel_fpu_begin();
26754b6a1bdSHuang Ying 		aesni_enc(ctx, dst, src);
26854b6a1bdSHuang Ying 		kernel_fpu_end();
26954b6a1bdSHuang Ying 	}
27054b6a1bdSHuang Ying }
27154b6a1bdSHuang Ying 
272724ecd3cSArd Biesheuvel static void aesni_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
27354b6a1bdSHuang Ying {
27454b6a1bdSHuang Ying 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));
27554b6a1bdSHuang Ying 
2762c53fd11SArd Biesheuvel 	if (!crypto_simd_usable()) {
2772c53fd11SArd Biesheuvel 		aes_decrypt(ctx, dst, src);
2782c53fd11SArd Biesheuvel 	} else {
27954b6a1bdSHuang Ying 		kernel_fpu_begin();
28054b6a1bdSHuang Ying 		aesni_dec(ctx, dst, src);
28154b6a1bdSHuang Ying 		kernel_fpu_end();
28254b6a1bdSHuang Ying 	}
28354b6a1bdSHuang Ying }
28454b6a1bdSHuang Ying 
28585671860SHerbert Xu static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
28685671860SHerbert Xu 			         unsigned int len)
28754b6a1bdSHuang Ying {
288*28b77609SEric Biggers 	return aes_set_key_common(aes_ctx(crypto_skcipher_ctx(tfm)), key, len);
28985671860SHerbert Xu }
29085671860SHerbert Xu 
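/*
 * The ECB/CBC helpers below share one pattern: walk the request, hand each
 * run of full blocks to the assembly inside a kernel_fpu_begin()/
 * kernel_fpu_end() section, and report the leftover (partial-block) bytes
 * back to skcipher_walk_done().
 */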
29185671860SHerbert Xu static int ecb_encrypt(struct skcipher_request *req)
29285671860SHerbert Xu {
29385671860SHerbert Xu 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
29485671860SHerbert Xu 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
29585671860SHerbert Xu 	struct skcipher_walk walk;
29685671860SHerbert Xu 	unsigned int nbytes;
29754b6a1bdSHuang Ying 	int err;
29854b6a1bdSHuang Ying 
29965d1e3c4SArd Biesheuvel 	err = skcipher_walk_virt(&walk, req, false);
30054b6a1bdSHuang Ying 
30154b6a1bdSHuang Ying 	while ((nbytes = walk.nbytes)) {
30265d1e3c4SArd Biesheuvel 		kernel_fpu_begin();
30354b6a1bdSHuang Ying 		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
30454b6a1bdSHuang Ying 			      nbytes & AES_BLOCK_MASK);
30565d1e3c4SArd Biesheuvel 		kernel_fpu_end();
30654b6a1bdSHuang Ying 		nbytes &= AES_BLOCK_SIZE - 1;
30785671860SHerbert Xu 		err = skcipher_walk_done(&walk, nbytes);
30854b6a1bdSHuang Ying 	}
30954b6a1bdSHuang Ying 
31054b6a1bdSHuang Ying 	return err;
31154b6a1bdSHuang Ying }
31254b6a1bdSHuang Ying 
31385671860SHerbert Xu static int ecb_decrypt(struct skcipher_request *req)
31454b6a1bdSHuang Ying {
31585671860SHerbert Xu 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
31685671860SHerbert Xu 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
31785671860SHerbert Xu 	struct skcipher_walk walk;
31885671860SHerbert Xu 	unsigned int nbytes;
31954b6a1bdSHuang Ying 	int err;
32054b6a1bdSHuang Ying 
32165d1e3c4SArd Biesheuvel 	err = skcipher_walk_virt(&walk, req, false);
32254b6a1bdSHuang Ying 
32354b6a1bdSHuang Ying 	while ((nbytes = walk.nbytes)) {
32465d1e3c4SArd Biesheuvel 		kernel_fpu_begin();
32554b6a1bdSHuang Ying 		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
32654b6a1bdSHuang Ying 			      nbytes & AES_BLOCK_MASK);
32765d1e3c4SArd Biesheuvel 		kernel_fpu_end();
32854b6a1bdSHuang Ying 		nbytes &= AES_BLOCK_SIZE - 1;
32985671860SHerbert Xu 		err = skcipher_walk_done(&walk, nbytes);
33054b6a1bdSHuang Ying 	}
33154b6a1bdSHuang Ying 
33254b6a1bdSHuang Ying 	return err;
33354b6a1bdSHuang Ying }
33454b6a1bdSHuang Ying 
33585671860SHerbert Xu static int cbc_encrypt(struct skcipher_request *req)
33654b6a1bdSHuang Ying {
33785671860SHerbert Xu 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
33885671860SHerbert Xu 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
33985671860SHerbert Xu 	struct skcipher_walk walk;
34085671860SHerbert Xu 	unsigned int nbytes;
34154b6a1bdSHuang Ying 	int err;
34254b6a1bdSHuang Ying 
34365d1e3c4SArd Biesheuvel 	err = skcipher_walk_virt(&walk, req, false);
34454b6a1bdSHuang Ying 
34554b6a1bdSHuang Ying 	while ((nbytes = walk.nbytes)) {
34665d1e3c4SArd Biesheuvel 		kernel_fpu_begin();
34754b6a1bdSHuang Ying 		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
34854b6a1bdSHuang Ying 			      nbytes & AES_BLOCK_MASK, walk.iv);
34965d1e3c4SArd Biesheuvel 		kernel_fpu_end();
35054b6a1bdSHuang Ying 		nbytes &= AES_BLOCK_SIZE - 1;
35185671860SHerbert Xu 		err = skcipher_walk_done(&walk, nbytes);
35254b6a1bdSHuang Ying 	}
35354b6a1bdSHuang Ying 
35454b6a1bdSHuang Ying 	return err;
35554b6a1bdSHuang Ying }
35654b6a1bdSHuang Ying 
35785671860SHerbert Xu static int cbc_decrypt(struct skcipher_request *req)
35854b6a1bdSHuang Ying {
35985671860SHerbert Xu 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
36085671860SHerbert Xu 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
36185671860SHerbert Xu 	struct skcipher_walk walk;
36285671860SHerbert Xu 	unsigned int nbytes;
36354b6a1bdSHuang Ying 	int err;
36454b6a1bdSHuang Ying 
36565d1e3c4SArd Biesheuvel 	err = skcipher_walk_virt(&walk, req, false);
36654b6a1bdSHuang Ying 
36754b6a1bdSHuang Ying 	while ((nbytes = walk.nbytes)) {
36865d1e3c4SArd Biesheuvel 		kernel_fpu_begin();
36954b6a1bdSHuang Ying 		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
37054b6a1bdSHuang Ying 			      nbytes & AES_BLOCK_MASK, walk.iv);
37165d1e3c4SArd Biesheuvel 		kernel_fpu_end();
37254b6a1bdSHuang Ying 		nbytes &= AES_BLOCK_SIZE - 1;
37385671860SHerbert Xu 		err = skcipher_walk_done(&walk, nbytes);
37454b6a1bdSHuang Ying 	}
37554b6a1bdSHuang Ying 
37654b6a1bdSHuang Ying 	return err;
37754b6a1bdSHuang Ying }
37854b6a1bdSHuang Ying 
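/*
 * CBC with ciphertext stealing: everything up to the last two blocks is run
 * through the plain cbc_encrypt()/cbc_decrypt() helpers via a subrequest,
 * and the remaining full + partial block pair is handled by the dedicated
 * aesni_cts_cbc_enc()/aesni_cts_cbc_dec() assembly.
 */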
379ddf169a9SArd Biesheuvel static int cts_cbc_encrypt(struct skcipher_request *req)
380ddf169a9SArd Biesheuvel {
381ddf169a9SArd Biesheuvel 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
382ddf169a9SArd Biesheuvel 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
383ddf169a9SArd Biesheuvel 	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
384ddf169a9SArd Biesheuvel 	struct scatterlist *src = req->src, *dst = req->dst;
385ddf169a9SArd Biesheuvel 	struct scatterlist sg_src[2], sg_dst[2];
386ddf169a9SArd Biesheuvel 	struct skcipher_request subreq;
387ddf169a9SArd Biesheuvel 	struct skcipher_walk walk;
388ddf169a9SArd Biesheuvel 	int err;
389ddf169a9SArd Biesheuvel 
390ddf169a9SArd Biesheuvel 	skcipher_request_set_tfm(&subreq, tfm);
391ddf169a9SArd Biesheuvel 	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
392ddf169a9SArd Biesheuvel 				      NULL, NULL);
393ddf169a9SArd Biesheuvel 
394ddf169a9SArd Biesheuvel 	if (req->cryptlen <= AES_BLOCK_SIZE) {
395ddf169a9SArd Biesheuvel 		if (req->cryptlen < AES_BLOCK_SIZE)
396ddf169a9SArd Biesheuvel 			return -EINVAL;
397ddf169a9SArd Biesheuvel 		cbc_blocks = 1;
398ddf169a9SArd Biesheuvel 	}
399ddf169a9SArd Biesheuvel 
400ddf169a9SArd Biesheuvel 	if (cbc_blocks > 0) {
401ddf169a9SArd Biesheuvel 		skcipher_request_set_crypt(&subreq, req->src, req->dst,
402ddf169a9SArd Biesheuvel 					   cbc_blocks * AES_BLOCK_SIZE,
403ddf169a9SArd Biesheuvel 					   req->iv);
404ddf169a9SArd Biesheuvel 
405ddf169a9SArd Biesheuvel 		err = cbc_encrypt(&subreq);
406ddf169a9SArd Biesheuvel 		if (err)
407ddf169a9SArd Biesheuvel 			return err;
408ddf169a9SArd Biesheuvel 
409ddf169a9SArd Biesheuvel 		if (req->cryptlen == AES_BLOCK_SIZE)
410ddf169a9SArd Biesheuvel 			return 0;
411ddf169a9SArd Biesheuvel 
412ddf169a9SArd Biesheuvel 		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
413ddf169a9SArd Biesheuvel 		if (req->dst != req->src)
414ddf169a9SArd Biesheuvel 			dst = scatterwalk_ffwd(sg_dst, req->dst,
415ddf169a9SArd Biesheuvel 					       subreq.cryptlen);
416ddf169a9SArd Biesheuvel 	}
417ddf169a9SArd Biesheuvel 
418ddf169a9SArd Biesheuvel 	/* handle ciphertext stealing */
419ddf169a9SArd Biesheuvel 	skcipher_request_set_crypt(&subreq, src, dst,
420ddf169a9SArd Biesheuvel 				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
421ddf169a9SArd Biesheuvel 				   req->iv);
422ddf169a9SArd Biesheuvel 
423ddf169a9SArd Biesheuvel 	err = skcipher_walk_virt(&walk, &subreq, false);
424ddf169a9SArd Biesheuvel 	if (err)
425ddf169a9SArd Biesheuvel 		return err;
426ddf169a9SArd Biesheuvel 
427ddf169a9SArd Biesheuvel 	kernel_fpu_begin();
428ddf169a9SArd Biesheuvel 	aesni_cts_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
429ddf169a9SArd Biesheuvel 			  walk.nbytes, walk.iv);
430ddf169a9SArd Biesheuvel 	kernel_fpu_end();
431ddf169a9SArd Biesheuvel 
432ddf169a9SArd Biesheuvel 	return skcipher_walk_done(&walk, 0);
433ddf169a9SArd Biesheuvel }
434ddf169a9SArd Biesheuvel 
435ddf169a9SArd Biesheuvel static int cts_cbc_decrypt(struct skcipher_request *req)
436ddf169a9SArd Biesheuvel {
437ddf169a9SArd Biesheuvel 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
438ddf169a9SArd Biesheuvel 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
439ddf169a9SArd Biesheuvel 	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
440ddf169a9SArd Biesheuvel 	struct scatterlist *src = req->src, *dst = req->dst;
441ddf169a9SArd Biesheuvel 	struct scatterlist sg_src[2], sg_dst[2];
442ddf169a9SArd Biesheuvel 	struct skcipher_request subreq;
443ddf169a9SArd Biesheuvel 	struct skcipher_walk walk;
444ddf169a9SArd Biesheuvel 	int err;
445ddf169a9SArd Biesheuvel 
446ddf169a9SArd Biesheuvel 	skcipher_request_set_tfm(&subreq, tfm);
447ddf169a9SArd Biesheuvel 	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
448ddf169a9SArd Biesheuvel 				      NULL, NULL);
449ddf169a9SArd Biesheuvel 
450ddf169a9SArd Biesheuvel 	if (req->cryptlen <= AES_BLOCK_SIZE) {
451ddf169a9SArd Biesheuvel 		if (req->cryptlen < AES_BLOCK_SIZE)
452ddf169a9SArd Biesheuvel 			return -EINVAL;
453ddf169a9SArd Biesheuvel 		cbc_blocks = 1;
454ddf169a9SArd Biesheuvel 	}
455ddf169a9SArd Biesheuvel 
456ddf169a9SArd Biesheuvel 	if (cbc_blocks > 0) {
457ddf169a9SArd Biesheuvel 		skcipher_request_set_crypt(&subreq, req->src, req->dst,
458ddf169a9SArd Biesheuvel 					   cbc_blocks * AES_BLOCK_SIZE,
459ddf169a9SArd Biesheuvel 					   req->iv);
460ddf169a9SArd Biesheuvel 
461ddf169a9SArd Biesheuvel 		err = cbc_decrypt(&subreq);
462ddf169a9SArd Biesheuvel 		if (err)
463ddf169a9SArd Biesheuvel 			return err;
464ddf169a9SArd Biesheuvel 
465ddf169a9SArd Biesheuvel 		if (req->cryptlen == AES_BLOCK_SIZE)
466ddf169a9SArd Biesheuvel 			return 0;
467ddf169a9SArd Biesheuvel 
468ddf169a9SArd Biesheuvel 		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
469ddf169a9SArd Biesheuvel 		if (req->dst != req->src)
470ddf169a9SArd Biesheuvel 			dst = scatterwalk_ffwd(sg_dst, req->dst,
471ddf169a9SArd Biesheuvel 					       subreq.cryptlen);
472ddf169a9SArd Biesheuvel 	}
473ddf169a9SArd Biesheuvel 
474ddf169a9SArd Biesheuvel 	/* handle ciphertext stealing */
475ddf169a9SArd Biesheuvel 	skcipher_request_set_crypt(&subreq, src, dst,
476ddf169a9SArd Biesheuvel 				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
477ddf169a9SArd Biesheuvel 				   req->iv);
478ddf169a9SArd Biesheuvel 
479ddf169a9SArd Biesheuvel 	err = skcipher_walk_virt(&walk, &subreq, false);
480ddf169a9SArd Biesheuvel 	if (err)
481ddf169a9SArd Biesheuvel 		return err;
482ddf169a9SArd Biesheuvel 
483ddf169a9SArd Biesheuvel 	kernel_fpu_begin();
484ddf169a9SArd Biesheuvel 	aesni_cts_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
485ddf169a9SArd Biesheuvel 			  walk.nbytes, walk.iv);
486ddf169a9SArd Biesheuvel 	kernel_fpu_end();
487ddf169a9SArd Biesheuvel 
488ddf169a9SArd Biesheuvel 	return skcipher_walk_done(&walk, 0);
489ddf169a9SArd Biesheuvel }
490ddf169a9SArd Biesheuvel 
4910d258efbSMathias Krause #ifdef CONFIG_X86_64
49222cddcc7Schandramouli narayanan static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
49322cddcc7Schandramouli narayanan 			      const u8 *in, unsigned int len, u8 *iv)
49422cddcc7Schandramouli narayanan {
49522cddcc7Schandramouli narayanan 	/*
49622cddcc7Schandramouli narayanan 	 * Based on the key length, dispatch to the by8 version
49722cddcc7Schandramouli narayanan 	 * of CTR mode encryption/decryption for improved performance.
49822cddcc7Schandramouli narayanan 	 * aes_set_key_common() ensures that the key length is one of
49922cddcc7Schandramouli narayanan 	 * {128,192,256}.
50022cddcc7Schandramouli narayanan 	 */
50122cddcc7Schandramouli narayanan 	if (ctx->key_length == AES_KEYSIZE_128)
50222cddcc7Schandramouli narayanan 		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
50322cddcc7Schandramouli narayanan 	else if (ctx->key_length == AES_KEYSIZE_192)
50422cddcc7Schandramouli narayanan 		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
50522cddcc7Schandramouli narayanan 	else
50622cddcc7Schandramouli narayanan 		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
50722cddcc7Schandramouli narayanan }
50822cddcc7Schandramouli narayanan 
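/*
 * CTR mode: full blocks go through the aesni_ctr_enc_tfm static call (the
 * plain AES-NI routine by default, presumably repointed to the AVX by8
 * variant at init time - not shown in this excerpt). A trailing partial
 * block is handled by encrypting the counter once and XOR-ing the resulting
 * keystream manually.
 */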
50985671860SHerbert Xu static int ctr_crypt(struct skcipher_request *req)
51012387a46SHuang Ying {
51185671860SHerbert Xu 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
51285671860SHerbert Xu 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
51365d1e3c4SArd Biesheuvel 	u8 keystream[AES_BLOCK_SIZE];
51485671860SHerbert Xu 	struct skcipher_walk walk;
51585671860SHerbert Xu 	unsigned int nbytes;
51612387a46SHuang Ying 	int err;
51712387a46SHuang Ying 
51865d1e3c4SArd Biesheuvel 	err = skcipher_walk_virt(&walk, req, false);
51912387a46SHuang Ying 
52065d1e3c4SArd Biesheuvel 	while ((nbytes = walk.nbytes) > 0) {
52112387a46SHuang Ying 		kernel_fpu_begin();
52265d1e3c4SArd Biesheuvel 		if (nbytes & AES_BLOCK_MASK)
52364a49b85SArd Biesheuvel 			static_call(aesni_ctr_enc_tfm)(ctx, walk.dst.virt.addr,
52464a49b85SArd Biesheuvel 						       walk.src.virt.addr,
52564a49b85SArd Biesheuvel 						       nbytes & AES_BLOCK_MASK,
52664a49b85SArd Biesheuvel 						       walk.iv);
52765d1e3c4SArd Biesheuvel 		nbytes &= ~AES_BLOCK_MASK;
52865d1e3c4SArd Biesheuvel 
52965d1e3c4SArd Biesheuvel 		if (walk.nbytes == walk.total && nbytes > 0) {
53065d1e3c4SArd Biesheuvel 			aesni_enc(ctx, keystream, walk.iv);
53165d1e3c4SArd Biesheuvel 			crypto_xor_cpy(walk.dst.virt.addr + walk.nbytes - nbytes,
53265d1e3c4SArd Biesheuvel 				       walk.src.virt.addr + walk.nbytes - nbytes,
53365d1e3c4SArd Biesheuvel 				       keystream, nbytes);
53465d1e3c4SArd Biesheuvel 			crypto_inc(walk.iv, AES_BLOCK_SIZE);
53565d1e3c4SArd Biesheuvel 			nbytes = 0;
53612387a46SHuang Ying 		}
53712387a46SHuang Ying 		kernel_fpu_end();
53865d1e3c4SArd Biesheuvel 		err = skcipher_walk_done(&walk, nbytes);
53965d1e3c4SArd Biesheuvel 	}
54012387a46SHuang Ying 	return err;
54112387a46SHuang Ying }
54212387a46SHuang Ying 
543fd94fcf0SNathan Huckleberry static void aesni_xctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
544fd94fcf0SNathan Huckleberry 				   const u8 *in, unsigned int len, u8 *iv,
545fd94fcf0SNathan Huckleberry 				   unsigned int byte_ctr)
546fd94fcf0SNathan Huckleberry {
547fd94fcf0SNathan Huckleberry 	if (ctx->key_length == AES_KEYSIZE_128)
548fd94fcf0SNathan Huckleberry 		aes_xctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len,
549fd94fcf0SNathan Huckleberry 					 byte_ctr);
550fd94fcf0SNathan Huckleberry 	else if (ctx->key_length == AES_KEYSIZE_192)
551fd94fcf0SNathan Huckleberry 		aes_xctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len,
552fd94fcf0SNathan Huckleberry 					 byte_ctr);
553fd94fcf0SNathan Huckleberry 	else
554fd94fcf0SNathan Huckleberry 		aes_xctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len,
555fd94fcf0SNathan Huckleberry 					 byte_ctr);
556fd94fcf0SNathan Huckleberry }
557fd94fcf0SNathan Huckleberry 
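/*
 * XCTR mode (the counter variant used by HCTR2): instead of incrementing a
 * big-endian counter in the IV, the block number is XOR-ed into the IV.
 * byte_ctr tracks how many bytes earlier walk steps consumed so the tail
 * block below uses the correct counter value.
 */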
558fd94fcf0SNathan Huckleberry static int xctr_crypt(struct skcipher_request *req)
559fd94fcf0SNathan Huckleberry {
560fd94fcf0SNathan Huckleberry 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
561fd94fcf0SNathan Huckleberry 	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
562fd94fcf0SNathan Huckleberry 	u8 keystream[AES_BLOCK_SIZE];
563fd94fcf0SNathan Huckleberry 	struct skcipher_walk walk;
564fd94fcf0SNathan Huckleberry 	unsigned int nbytes;
565fd94fcf0SNathan Huckleberry 	unsigned int byte_ctr = 0;
566fd94fcf0SNathan Huckleberry 	int err;
567fd94fcf0SNathan Huckleberry 	__le32 block[AES_BLOCK_SIZE / sizeof(__le32)];
568fd94fcf0SNathan Huckleberry 
569fd94fcf0SNathan Huckleberry 	err = skcipher_walk_virt(&walk, req, false);
570fd94fcf0SNathan Huckleberry 
571fd94fcf0SNathan Huckleberry 	while ((nbytes = walk.nbytes) > 0) {
572fd94fcf0SNathan Huckleberry 		kernel_fpu_begin();
573fd94fcf0SNathan Huckleberry 		if (nbytes & AES_BLOCK_MASK)
574fd94fcf0SNathan Huckleberry 			aesni_xctr_enc_avx_tfm(ctx, walk.dst.virt.addr,
575fd94fcf0SNathan Huckleberry 				walk.src.virt.addr, nbytes & AES_BLOCK_MASK,
576fd94fcf0SNathan Huckleberry 				walk.iv, byte_ctr);
577fd94fcf0SNathan Huckleberry 		nbytes &= ~AES_BLOCK_MASK;
578fd94fcf0SNathan Huckleberry 		byte_ctr += walk.nbytes - nbytes;
579fd94fcf0SNathan Huckleberry 
580fd94fcf0SNathan Huckleberry 		if (walk.nbytes == walk.total && nbytes > 0) {
581fd94fcf0SNathan Huckleberry 			memcpy(block, walk.iv, AES_BLOCK_SIZE);
582fd94fcf0SNathan Huckleberry 			block[0] ^= cpu_to_le32(1 + byte_ctr / AES_BLOCK_SIZE);
583fd94fcf0SNathan Huckleberry 			aesni_enc(ctx, keystream, (u8 *)block);
584fd94fcf0SNathan Huckleberry 			crypto_xor_cpy(walk.dst.virt.addr + walk.nbytes -
585fd94fcf0SNathan Huckleberry 				       nbytes, walk.src.virt.addr + walk.nbytes
586fd94fcf0SNathan Huckleberry 				       - nbytes, keystream, nbytes);
587fd94fcf0SNathan Huckleberry 			byte_ctr += nbytes;
588fd94fcf0SNathan Huckleberry 			nbytes = 0;
589fd94fcf0SNathan Huckleberry 		}
590fd94fcf0SNathan Huckleberry 		kernel_fpu_end();
591fd94fcf0SNathan Huckleberry 		err = skcipher_walk_done(&walk, nbytes);
592fd94fcf0SNathan Huckleberry 	}
593fd94fcf0SNathan Huckleberry 	return err;
594fd94fcf0SNathan Huckleberry }
595fd94fcf0SNathan Huckleberry 
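/*
 * Derive the GHASH subkey by encrypting an all-zero block with the raw AES
 * key. The generic aes_expandkey()/aes_encrypt() helpers are used here, so
 * no kernel_fpu_begin() section is needed.
 */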
5960bd82f5fSTadeusz Struk static int
5970bd82f5fSTadeusz Struk rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
5980bd82f5fSTadeusz Struk {
599f6680cbdSArd Biesheuvel 	struct crypto_aes_ctx ctx;
60002fa472aSHerbert Xu 	int ret;
6010bd82f5fSTadeusz Struk 
602f6680cbdSArd Biesheuvel 	ret = aes_expandkey(&ctx, key, key_len);
6037efd95f6SJesper Juhl 	if (ret)
604f6680cbdSArd Biesheuvel 		return ret;
6050bd82f5fSTadeusz Struk 
6060bd82f5fSTadeusz Struk 	/* Clear the hash subkey container to zero; the hash subkey is
6070bd82f5fSTadeusz Struk 	 * created by encrypting an all-zero block below. */
6080bd82f5fSTadeusz Struk 	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);
6090bd82f5fSTadeusz Struk 
610f6680cbdSArd Biesheuvel 	aes_encrypt(&ctx, hash_subkey, hash_subkey);
6110bd82f5fSTadeusz Struk 
612f6680cbdSArd Biesheuvel 	memzero_explicit(&ctx, sizeof(ctx));
613f6680cbdSArd Biesheuvel 	return 0;
6140bd82f5fSTadeusz Struk }
6150bd82f5fSTadeusz Struk 
61681e397d9STadeusz Struk static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
6170bd82f5fSTadeusz Struk 				  unsigned int key_len)
6180bd82f5fSTadeusz Struk {
61981e397d9STadeusz Struk 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);
6200bd82f5fSTadeusz Struk 
621674f368aSEric Biggers 	if (key_len < 4)
6220bd82f5fSTadeusz Struk 		return -EINVAL;
623674f368aSEric Biggers 
6240bd82f5fSTadeusz Struk 	/* Account for the 4-byte nonce at the end. */
6250bd82f5fSTadeusz Struk 	key_len -= 4;
6260bd82f5fSTadeusz Struk 
6270bd82f5fSTadeusz Struk 	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));
6280bd82f5fSTadeusz Struk 
629*28b77609SEric Biggers 	return aes_set_key_common(&ctx->aes_key_expanded, key, key_len) ?:
630b7c89d9eSHerbert Xu 	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
6310bd82f5fSTadeusz Struk }
6320bd82f5fSTadeusz Struk 
633149e1225SEric Biggers /* This is the Integrity Check Value (aka the authentication tag) length and can
634149e1225SEric Biggers  * be 8, 12 or 16 bytes long. */
63581e397d9STadeusz Struk static int common_rfc4106_set_authsize(struct crypto_aead *aead,
63681e397d9STadeusz Struk 				       unsigned int authsize)
63781e397d9STadeusz Struk {
6380bd82f5fSTadeusz Struk 	switch (authsize) {
6390bd82f5fSTadeusz Struk 	case 8:
6400bd82f5fSTadeusz Struk 	case 12:
6410bd82f5fSTadeusz Struk 	case 16:
6420bd82f5fSTadeusz Struk 		break;
6430bd82f5fSTadeusz Struk 	default:
6440bd82f5fSTadeusz Struk 		return -EINVAL;
6450bd82f5fSTadeusz Struk 	}
646b7c89d9eSHerbert Xu 
6470bd82f5fSTadeusz Struk 	return 0;
6480bd82f5fSTadeusz Struk }
6490bd82f5fSTadeusz Struk 
650cce2ea8dSSabrina Dubroca static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
651cce2ea8dSSabrina Dubroca 				       unsigned int authsize)
652cce2ea8dSSabrina Dubroca {
653cce2ea8dSSabrina Dubroca 	switch (authsize) {
654cce2ea8dSSabrina Dubroca 	case 4:
655cce2ea8dSSabrina Dubroca 	case 8:
656cce2ea8dSSabrina Dubroca 	case 12:
657cce2ea8dSSabrina Dubroca 	case 13:
658cce2ea8dSSabrina Dubroca 	case 14:
659cce2ea8dSSabrina Dubroca 	case 15:
660cce2ea8dSSabrina Dubroca 	case 16:
661cce2ea8dSSabrina Dubroca 		break;
662cce2ea8dSSabrina Dubroca 	default:
663cce2ea8dSSabrina Dubroca 		return -EINVAL;
664cce2ea8dSSabrina Dubroca 	}
665cce2ea8dSSabrina Dubroca 
666cce2ea8dSSabrina Dubroca 	return 0;
667cce2ea8dSSabrina Dubroca }
668cce2ea8dSSabrina Dubroca 
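/*
 * Common GCM worker for both encryption and decryption. The associated data
 * is linearized first (mapped directly when it already sits in one
 * scatterlist entry, otherwise copied to a heap buffer), then the
 * init/update/finalize assembly is invoked - choosing the AVX2, AVX or plain
 * AES-NI variant based on the static keys and payload size - while walking
 * the plaintext/ciphertext scatterlists.
 */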
669e8455207SDave Watson static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
670e8455207SDave Watson 			      unsigned int assoclen, u8 *hash_subkey,
67183c83e65SArd Biesheuvel 			      u8 *iv, void *aes_ctx, u8 *auth_tag,
67283c83e65SArd Biesheuvel 			      unsigned long auth_tag_len)
673e8455207SDave Watson {
674a13ed1d1SArd Biesheuvel 	u8 databuf[sizeof(struct gcm_context_data) + (AESNI_ALIGN - 8)] __aligned(8);
675a13ed1d1SArd Biesheuvel 	struct gcm_context_data *data = PTR_ALIGN((void *)databuf, AESNI_ALIGN);
676e8455207SDave Watson 	unsigned long left = req->cryptlen;
677e8455207SDave Watson 	struct scatter_walk assoc_sg_walk;
67883c83e65SArd Biesheuvel 	struct skcipher_walk walk;
679d6cbf4eaSArd Biesheuvel 	bool do_avx, do_avx2;
680e8455207SDave Watson 	u8 *assocmem = NULL;
68183c83e65SArd Biesheuvel 	u8 *assoc;
68283c83e65SArd Biesheuvel 	int err;
683e8455207SDave Watson 
684e8455207SDave Watson 	if (!enc)
685e8455207SDave Watson 		left -= auth_tag_len;
686e8455207SDave Watson 
687d6cbf4eaSArd Biesheuvel 	do_avx = (left >= AVX_GEN2_OPTSIZE);
688d6cbf4eaSArd Biesheuvel 	do_avx2 = (left >= AVX_GEN4_OPTSIZE);
689603f8c3bSDave Watson 
690e8455207SDave Watson 	/* Linearize assoc, if not already linear */
6912694e23fSArd Biesheuvel 	if (req->src->length >= assoclen && req->src->length) {
692e8455207SDave Watson 		scatterwalk_start(&assoc_sg_walk, req->src);
693e8455207SDave Watson 		assoc = scatterwalk_map(&assoc_sg_walk);
694e8455207SDave Watson 	} else {
6952694e23fSArd Biesheuvel 		gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
6962694e23fSArd Biesheuvel 			      GFP_KERNEL : GFP_ATOMIC;
6972694e23fSArd Biesheuvel 
698e8455207SDave Watson 		/* assoc can be any length, so must be on heap */
6992694e23fSArd Biesheuvel 		assocmem = kmalloc(assoclen, flags);
700e8455207SDave Watson 		if (unlikely(!assocmem))
701e8455207SDave Watson 			return -ENOMEM;
702e8455207SDave Watson 		assoc = assocmem;
703e8455207SDave Watson 
704e8455207SDave Watson 		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
705e8455207SDave Watson 	}
706e8455207SDave Watson 
707e8455207SDave Watson 	kernel_fpu_begin();
708d6cbf4eaSArd Biesheuvel 	if (static_branch_likely(&gcm_use_avx2) && do_avx2)
709d6cbf4eaSArd Biesheuvel 		aesni_gcm_init_avx_gen4(aes_ctx, data, iv, hash_subkey, assoc,
710d6cbf4eaSArd Biesheuvel 					assoclen);
711d6cbf4eaSArd Biesheuvel 	else if (static_branch_likely(&gcm_use_avx) && do_avx)
712d6cbf4eaSArd Biesheuvel 		aesni_gcm_init_avx_gen2(aes_ctx, data, iv, hash_subkey, assoc,
713d6cbf4eaSArd Biesheuvel 					assoclen);
714d6cbf4eaSArd Biesheuvel 	else
715d6cbf4eaSArd Biesheuvel 		aesni_gcm_init(aes_ctx, data, iv, hash_subkey, assoc, assoclen);
716e8455207SDave Watson 	kernel_fpu_end();
717e8455207SDave Watson 
718e8455207SDave Watson 	if (!assocmem)
719e8455207SDave Watson 		scatterwalk_unmap(assoc);
720e8455207SDave Watson 	else
721e8455207SDave Watson 		kfree(assocmem);
722e8455207SDave Watson 
72383c83e65SArd Biesheuvel 	err = enc ? skcipher_walk_aead_encrypt(&walk, req, false)
72483c83e65SArd Biesheuvel 		  : skcipher_walk_aead_decrypt(&walk, req, false);
725e8455207SDave Watson 
72683c83e65SArd Biesheuvel 	while (walk.nbytes > 0) {
72783c83e65SArd Biesheuvel 		kernel_fpu_begin();
728d6cbf4eaSArd Biesheuvel 		if (static_branch_likely(&gcm_use_avx2) && do_avx2) {
729d6cbf4eaSArd Biesheuvel 			if (enc)
730d6cbf4eaSArd Biesheuvel 				aesni_gcm_enc_update_avx_gen4(aes_ctx, data,
731d6cbf4eaSArd Biesheuvel 							      walk.dst.virt.addr,
732d6cbf4eaSArd Biesheuvel 							      walk.src.virt.addr,
733d6cbf4eaSArd Biesheuvel 							      walk.nbytes);
734d6cbf4eaSArd Biesheuvel 			else
735d6cbf4eaSArd Biesheuvel 				aesni_gcm_dec_update_avx_gen4(aes_ctx, data,
736d6cbf4eaSArd Biesheuvel 							      walk.dst.virt.addr,
737d6cbf4eaSArd Biesheuvel 							      walk.src.virt.addr,
738d6cbf4eaSArd Biesheuvel 							      walk.nbytes);
739d6cbf4eaSArd Biesheuvel 		} else if (static_branch_likely(&gcm_use_avx) && do_avx) {
740d6cbf4eaSArd Biesheuvel 			if (enc)
741d6cbf4eaSArd Biesheuvel 				aesni_gcm_enc_update_avx_gen2(aes_ctx, data,
742d6cbf4eaSArd Biesheuvel 							      walk.dst.virt.addr,
743d6cbf4eaSArd Biesheuvel 							      walk.src.virt.addr,
744d6cbf4eaSArd Biesheuvel 							      walk.nbytes);
745d6cbf4eaSArd Biesheuvel 			else
746d6cbf4eaSArd Biesheuvel 				aesni_gcm_dec_update_avx_gen2(aes_ctx, data,
747d6cbf4eaSArd Biesheuvel 							      walk.dst.virt.addr,
748d6cbf4eaSArd Biesheuvel 							      walk.src.virt.addr,
749d6cbf4eaSArd Biesheuvel 							      walk.nbytes);
750d6cbf4eaSArd Biesheuvel 		} else if (enc) {
751d6cbf4eaSArd Biesheuvel 			aesni_gcm_enc_update(aes_ctx, data, walk.dst.virt.addr,
75283c83e65SArd Biesheuvel 					     walk.src.virt.addr, walk.nbytes);
753d6cbf4eaSArd Biesheuvel 		} else {
754d6cbf4eaSArd Biesheuvel 			aesni_gcm_dec_update(aes_ctx, data, walk.dst.virt.addr,
755d6cbf4eaSArd Biesheuvel 					     walk.src.virt.addr, walk.nbytes);
756d6cbf4eaSArd Biesheuvel 		}
75783c83e65SArd Biesheuvel 		kernel_fpu_end();
758e8455207SDave Watson 
75983c83e65SArd Biesheuvel 		err = skcipher_walk_done(&walk, 0);
760e8455207SDave Watson 	}
761e8455207SDave Watson 
76283c83e65SArd Biesheuvel 	if (err)
76383c83e65SArd Biesheuvel 		return err;
76483c83e65SArd Biesheuvel 
76583c83e65SArd Biesheuvel 	kernel_fpu_begin();
766d6cbf4eaSArd Biesheuvel 	if (static_branch_likely(&gcm_use_avx2) && do_avx2)
767d6cbf4eaSArd Biesheuvel 		aesni_gcm_finalize_avx_gen4(aes_ctx, data, auth_tag,
768d6cbf4eaSArd Biesheuvel 					    auth_tag_len);
769d6cbf4eaSArd Biesheuvel 	else if (static_branch_likely(&gcm_use_avx) && do_avx)
770d6cbf4eaSArd Biesheuvel 		aesni_gcm_finalize_avx_gen2(aes_ctx, data, auth_tag,
771d6cbf4eaSArd Biesheuvel 					    auth_tag_len);
772d6cbf4eaSArd Biesheuvel 	else
773d6cbf4eaSArd Biesheuvel 		aesni_gcm_finalize(aes_ctx, data, auth_tag, auth_tag_len);
77483c83e65SArd Biesheuvel 	kernel_fpu_end();
775e8455207SDave Watson 
776e8455207SDave Watson 	return 0;
777e8455207SDave Watson }
778e8455207SDave Watson 
779cce2ea8dSSabrina Dubroca static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
780cce2ea8dSSabrina Dubroca 			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
7810bd82f5fSTadeusz Struk {
78283c83e65SArd Biesheuvel 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
78383c83e65SArd Biesheuvel 	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
78483c83e65SArd Biesheuvel 	u8 auth_tag[16];
78583c83e65SArd Biesheuvel 	int err;
78683c83e65SArd Biesheuvel 
78783c83e65SArd Biesheuvel 	err = gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv, aes_ctx,
78883c83e65SArd Biesheuvel 				 auth_tag, auth_tag_len);
78983c83e65SArd Biesheuvel 	if (err)
79083c83e65SArd Biesheuvel 		return err;
79183c83e65SArd Biesheuvel 
79283c83e65SArd Biesheuvel 	scatterwalk_map_and_copy(auth_tag, req->dst,
79383c83e65SArd Biesheuvel 				 req->assoclen + req->cryptlen,
79483c83e65SArd Biesheuvel 				 auth_tag_len, 1);
79583c83e65SArd Biesheuvel 	return 0;
796e8455207SDave Watson }
7970bd82f5fSTadeusz Struk 
798cce2ea8dSSabrina Dubroca static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
799cce2ea8dSSabrina Dubroca 			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
8000bd82f5fSTadeusz Struk {
80183c83e65SArd Biesheuvel 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
80283c83e65SArd Biesheuvel 	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
80383c83e65SArd Biesheuvel 	u8 auth_tag_msg[16];
80483c83e65SArd Biesheuvel 	u8 auth_tag[16];
80583c83e65SArd Biesheuvel 	int err;
80683c83e65SArd Biesheuvel 
80783c83e65SArd Biesheuvel 	err = gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv, aes_ctx,
80883c83e65SArd Biesheuvel 				 auth_tag, auth_tag_len);
80983c83e65SArd Biesheuvel 	if (err)
81083c83e65SArd Biesheuvel 		return err;
81183c83e65SArd Biesheuvel 
81283c83e65SArd Biesheuvel 	/* Copy out original auth_tag */
81383c83e65SArd Biesheuvel 	scatterwalk_map_and_copy(auth_tag_msg, req->src,
81483c83e65SArd Biesheuvel 				 req->assoclen + req->cryptlen - auth_tag_len,
81583c83e65SArd Biesheuvel 				 auth_tag_len, 0);
81683c83e65SArd Biesheuvel 
81783c83e65SArd Biesheuvel 	/* Compare generated tag with passed in tag. */
81883c83e65SArd Biesheuvel 	if (crypto_memneq(auth_tag_msg, auth_tag, auth_tag_len)) {
81983c83e65SArd Biesheuvel 		memzero_explicit(auth_tag, sizeof(auth_tag));
82083c83e65SArd Biesheuvel 		return -EBADMSG;
82183c83e65SArd Biesheuvel 	}
82283c83e65SArd Biesheuvel 	return 0;
823e8455207SDave Watson }
824cce2ea8dSSabrina Dubroca 
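/*
 * RFC 4106 (IPsec ESP) wrappers. The 16-byte GCM IV buffer is laid out as:
 *     iv[0..3]   = ctx->nonce (salt saved at setkey time)
 *     iv[4..11]  = req->iv (explicit IV from the request)
 *     iv[12..15] = be32 block counter, initialised to 1
 */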
825cce2ea8dSSabrina Dubroca static int helper_rfc4106_encrypt(struct aead_request *req)
826cce2ea8dSSabrina Dubroca {
827cce2ea8dSSabrina Dubroca 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
828cce2ea8dSSabrina Dubroca 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
829cce2ea8dSSabrina Dubroca 	void *aes_ctx = &(ctx->aes_key_expanded);
830a13ed1d1SArd Biesheuvel 	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
831a13ed1d1SArd Biesheuvel 	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
832cce2ea8dSSabrina Dubroca 	unsigned int i;
833cce2ea8dSSabrina Dubroca 	__be32 counter = cpu_to_be32(1);
834cce2ea8dSSabrina Dubroca 
835cce2ea8dSSabrina Dubroca 	/* Assuming we are supporting rfc4106 64-bit extended
836cce2ea8dSSabrina Dubroca 	 * sequence numbers, the AAD length must be
837cce2ea8dSSabrina Dubroca 	 * 16 or 20 bytes. */
838cce2ea8dSSabrina Dubroca 	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
839cce2ea8dSSabrina Dubroca 		return -EINVAL;
840cce2ea8dSSabrina Dubroca 
841cce2ea8dSSabrina Dubroca 	/* Build the IV below */
842cce2ea8dSSabrina Dubroca 	for (i = 0; i < 4; i++)
843cce2ea8dSSabrina Dubroca 		*(iv+i) = ctx->nonce[i];
844cce2ea8dSSabrina Dubroca 	for (i = 0; i < 8; i++)
845cce2ea8dSSabrina Dubroca 		*(iv+4+i) = req->iv[i];
846cce2ea8dSSabrina Dubroca 	*((__be32 *)(iv+12)) = counter;
847cce2ea8dSSabrina Dubroca 
848cce2ea8dSSabrina Dubroca 	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
849cce2ea8dSSabrina Dubroca 			      aes_ctx);
850cce2ea8dSSabrina Dubroca }
851cce2ea8dSSabrina Dubroca 
852cce2ea8dSSabrina Dubroca static int helper_rfc4106_decrypt(struct aead_request *req)
853cce2ea8dSSabrina Dubroca {
854cce2ea8dSSabrina Dubroca 	__be32 counter = cpu_to_be32(1);
855cce2ea8dSSabrina Dubroca 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
856cce2ea8dSSabrina Dubroca 	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
857cce2ea8dSSabrina Dubroca 	void *aes_ctx = &(ctx->aes_key_expanded);
858a13ed1d1SArd Biesheuvel 	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
859a13ed1d1SArd Biesheuvel 	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
860cce2ea8dSSabrina Dubroca 	unsigned int i;
861cce2ea8dSSabrina Dubroca 
862cce2ea8dSSabrina Dubroca 	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
863cce2ea8dSSabrina Dubroca 		return -EINVAL;
864cce2ea8dSSabrina Dubroca 
865cce2ea8dSSabrina Dubroca 	/* Assuming we are supporting rfc4106 64-bit extended
866cce2ea8dSSabrina Dubroca 	 * sequence numbers, the AAD length must be
867cce2ea8dSSabrina Dubroca 	 * 16 or 20 bytes. */
868cce2ea8dSSabrina Dubroca 
869cce2ea8dSSabrina Dubroca 	/* Build the IV below */
870cce2ea8dSSabrina Dubroca 	for (i = 0; i < 4; i++)
871cce2ea8dSSabrina Dubroca 		*(iv+i) = ctx->nonce[i];
872cce2ea8dSSabrina Dubroca 	for (i = 0; i < 8; i++)
873cce2ea8dSSabrina Dubroca 		*(iv+4+i) = req->iv[i];
874cce2ea8dSSabrina Dubroca 	*((__be32 *)(iv+12)) = counter;
875cce2ea8dSSabrina Dubroca 
876cce2ea8dSSabrina Dubroca 	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
877cce2ea8dSSabrina Dubroca 			      aes_ctx);
8780bd82f5fSTadeusz Struk }
879fa46ccb8SJussi Kivilinna #endif
8800bd82f5fSTadeusz Struk 
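/*
 * XTS setkey: after xts_verify_key(), the supplied key is split in half -
 * the first half becomes the data-encryption key and the second half the
 * tweak-encryption key, each expanded into its own aligned sub-context.
 */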
8812481104fSArd Biesheuvel static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
8822481104fSArd Biesheuvel 			    unsigned int keylen)
8832481104fSArd Biesheuvel {
8842481104fSArd Biesheuvel 	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
8852481104fSArd Biesheuvel 	int err;
8862481104fSArd Biesheuvel 
8872481104fSArd Biesheuvel 	err = xts_verify_key(tfm, key, keylen);
8882481104fSArd Biesheuvel 	if (err)
8892481104fSArd Biesheuvel 		return err;
8902481104fSArd Biesheuvel 
8912481104fSArd Biesheuvel 	keylen /= 2;
8922481104fSArd Biesheuvel 
8932481104fSArd Biesheuvel 	/* first half of xts-key is for crypt */
894*28b77609SEric Biggers 	err = aes_set_key_common(aes_ctx(ctx->raw_crypt_ctx), key, keylen);
8952481104fSArd Biesheuvel 	if (err)
8962481104fSArd Biesheuvel 		return err;
8972481104fSArd Biesheuvel 
8982481104fSArd Biesheuvel 	/* second half of xts-key is for tweak */
899*28b77609SEric Biggers 	return aes_set_key_common(aes_ctx(ctx->raw_tweak_ctx), key + keylen,
900*28b77609SEric Biggers 				  keylen);
9012481104fSArd Biesheuvel }
9022481104fSArd Biesheuvel 
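/*
 * XTS encryption/decryption. The initial tweak is computed by encrypting
 * walk.iv with the tweak key, then whole blocks are processed by the XTS
 * assembly. If the length is not a multiple of AES_BLOCK_SIZE, the last two
 * blocks are held back and redone through a second walk so the assembly can
 * perform ciphertext stealing on the final partial block.
 */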
9032481104fSArd Biesheuvel static int xts_crypt(struct skcipher_request *req, bool encrypt)
9042481104fSArd Biesheuvel {
9052481104fSArd Biesheuvel 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
9062481104fSArd Biesheuvel 	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
9072481104fSArd Biesheuvel 	int tail = req->cryptlen % AES_BLOCK_SIZE;
9082481104fSArd Biesheuvel 	struct skcipher_request subreq;
9092481104fSArd Biesheuvel 	struct skcipher_walk walk;
9102481104fSArd Biesheuvel 	int err;
9112481104fSArd Biesheuvel 
9122481104fSArd Biesheuvel 	if (req->cryptlen < AES_BLOCK_SIZE)
9132481104fSArd Biesheuvel 		return -EINVAL;
9142481104fSArd Biesheuvel 
9152481104fSArd Biesheuvel 	err = skcipher_walk_virt(&walk, req, false);
91672ff2bf0SShreyansh Chouhan 	if (!walk.nbytes)
917821720b9SArd Biesheuvel 		return err;
9182481104fSArd Biesheuvel 
9192481104fSArd Biesheuvel 	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
9202481104fSArd Biesheuvel 		int blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
9212481104fSArd Biesheuvel 
9222481104fSArd Biesheuvel 		skcipher_walk_abort(&walk);
9232481104fSArd Biesheuvel 
9242481104fSArd Biesheuvel 		skcipher_request_set_tfm(&subreq, tfm);
9252481104fSArd Biesheuvel 		skcipher_request_set_callback(&subreq,
9262481104fSArd Biesheuvel 					      skcipher_request_flags(req),
9272481104fSArd Biesheuvel 					      NULL, NULL);
9282481104fSArd Biesheuvel 		skcipher_request_set_crypt(&subreq, req->src, req->dst,
9292481104fSArd Biesheuvel 					   blocks * AES_BLOCK_SIZE, req->iv);
9302481104fSArd Biesheuvel 		req = &subreq;
931821720b9SArd Biesheuvel 
9322481104fSArd Biesheuvel 		err = skcipher_walk_virt(&walk, req, false);
933a2d3cbc8SShreyansh Chouhan 		if (!walk.nbytes)
934821720b9SArd Biesheuvel 			return err;
9352481104fSArd Biesheuvel 	} else {
9362481104fSArd Biesheuvel 		tail = 0;
9372481104fSArd Biesheuvel 	}
9382481104fSArd Biesheuvel 
9392481104fSArd Biesheuvel 	kernel_fpu_begin();
9402481104fSArd Biesheuvel 
9412481104fSArd Biesheuvel 	/* calculate first value of T */
9422481104fSArd Biesheuvel 	aesni_enc(aes_ctx(ctx->raw_tweak_ctx), walk.iv, walk.iv);
9432481104fSArd Biesheuvel 
9442481104fSArd Biesheuvel 	while (walk.nbytes > 0) {
9452481104fSArd Biesheuvel 		int nbytes = walk.nbytes;
9462481104fSArd Biesheuvel 
9472481104fSArd Biesheuvel 		if (nbytes < walk.total)
9482481104fSArd Biesheuvel 			nbytes &= ~(AES_BLOCK_SIZE - 1);
9492481104fSArd Biesheuvel 
9502481104fSArd Biesheuvel 		if (encrypt)
9512481104fSArd Biesheuvel 			aesni_xts_encrypt(aes_ctx(ctx->raw_crypt_ctx),
9522481104fSArd Biesheuvel 					  walk.dst.virt.addr, walk.src.virt.addr,
9532481104fSArd Biesheuvel 					  nbytes, walk.iv);
9542481104fSArd Biesheuvel 		else
9552481104fSArd Biesheuvel 			aesni_xts_decrypt(aes_ctx(ctx->raw_crypt_ctx),
9562481104fSArd Biesheuvel 					  walk.dst.virt.addr, walk.src.virt.addr,
9572481104fSArd Biesheuvel 					  nbytes, walk.iv);
9582481104fSArd Biesheuvel 		kernel_fpu_end();
9592481104fSArd Biesheuvel 
9602481104fSArd Biesheuvel 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
9612481104fSArd Biesheuvel 
9622481104fSArd Biesheuvel 		if (walk.nbytes > 0)
9632481104fSArd Biesheuvel 			kernel_fpu_begin();
9642481104fSArd Biesheuvel 	}
9652481104fSArd Biesheuvel 
9662481104fSArd Biesheuvel 	if (unlikely(tail > 0 && !err)) {
9672481104fSArd Biesheuvel 		struct scatterlist sg_src[2], sg_dst[2];
9682481104fSArd Biesheuvel 		struct scatterlist *src, *dst;
9692481104fSArd Biesheuvel 
9702481104fSArd Biesheuvel 		dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
9712481104fSArd Biesheuvel 		if (req->dst != req->src)
9722481104fSArd Biesheuvel 			dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
9732481104fSArd Biesheuvel 
9742481104fSArd Biesheuvel 		skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
9752481104fSArd Biesheuvel 					   req->iv);
9762481104fSArd Biesheuvel 
9772481104fSArd Biesheuvel 		err = skcipher_walk_virt(&walk, &subreq, false);
9782481104fSArd Biesheuvel 		if (err)
9792481104fSArd Biesheuvel 			return err;
9802481104fSArd Biesheuvel 
9812481104fSArd Biesheuvel 		kernel_fpu_begin();
9822481104fSArd Biesheuvel 		if (encrypt)
9832481104fSArd Biesheuvel 			aesni_xts_encrypt(aes_ctx(ctx->raw_crypt_ctx),
9842481104fSArd Biesheuvel 					  walk.dst.virt.addr, walk.src.virt.addr,
9852481104fSArd Biesheuvel 					  walk.nbytes, walk.iv);
9862481104fSArd Biesheuvel 		else
9872481104fSArd Biesheuvel 			aesni_xts_decrypt(aes_ctx(ctx->raw_crypt_ctx),
9882481104fSArd Biesheuvel 					  walk.dst.virt.addr, walk.src.virt.addr,
9892481104fSArd Biesheuvel 					  walk.nbytes, walk.iv);
9902481104fSArd Biesheuvel 		kernel_fpu_end();
9912481104fSArd Biesheuvel 
9922481104fSArd Biesheuvel 		err = skcipher_walk_done(&walk, 0);
9932481104fSArd Biesheuvel 	}
9942481104fSArd Biesheuvel 	return err;
9952481104fSArd Biesheuvel }
9962481104fSArd Biesheuvel 
9972481104fSArd Biesheuvel static int xts_encrypt(struct skcipher_request *req)
9982481104fSArd Biesheuvel {
9992481104fSArd Biesheuvel 	return xts_crypt(req, true);
10002481104fSArd Biesheuvel }
10012481104fSArd Biesheuvel 
10022481104fSArd Biesheuvel static int xts_decrypt(struct skcipher_request *req)
10032481104fSArd Biesheuvel {
10042481104fSArd Biesheuvel 	return xts_crypt(req, false);
10052481104fSArd Biesheuvel }
10062481104fSArd Biesheuvel 
100707269559SEric Biggers static struct crypto_alg aesni_cipher_alg = {
1008fa46ccb8SJussi Kivilinna 	.cra_name		= "aes",
1009fa46ccb8SJussi Kivilinna 	.cra_driver_name	= "aes-aesni",
1010fa46ccb8SJussi Kivilinna 	.cra_priority		= 300,
1011fa46ccb8SJussi Kivilinna 	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
1012fa46ccb8SJussi Kivilinna 	.cra_blocksize		= AES_BLOCK_SIZE,
101385671860SHerbert Xu 	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
1014fa46ccb8SJussi Kivilinna 	.cra_module		= THIS_MODULE,
1015fa46ccb8SJussi Kivilinna 	.cra_u	= {
1016fa46ccb8SJussi Kivilinna 		.cipher	= {
1017fa46ccb8SJussi Kivilinna 			.cia_min_keysize	= AES_MIN_KEY_SIZE,
1018fa46ccb8SJussi Kivilinna 			.cia_max_keysize	= AES_MAX_KEY_SIZE,
1019fa46ccb8SJussi Kivilinna 			.cia_setkey		= aes_set_key,
1020724ecd3cSArd Biesheuvel 			.cia_encrypt		= aesni_encrypt,
1021724ecd3cSArd Biesheuvel 			.cia_decrypt		= aesni_decrypt
1022fa46ccb8SJussi Kivilinna 		}
1023fa46ccb8SJussi Kivilinna 	}
102407269559SEric Biggers };
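/*
 * Editorial note: the bare "aes" cipher above can be exposed directly
 * (no "__" prefix, no simd wrapper), as the single-block helpers
 * aesni_encrypt()/aesni_decrypt() are expected to fall back to the
 * generic AES code whenever the FPU is not usable.  The skcipher and
 * AEAD implementations below have no such fallback, so they are
 * registered as internal algorithms and reached through cryptd-backed
 * simd wrappers instead.
 */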
102585671860SHerbert Xu 
102685671860SHerbert Xu static struct skcipher_alg aesni_skciphers[] = {
102785671860SHerbert Xu 	{
102885671860SHerbert Xu 		.base = {
102985671860SHerbert Xu 			.cra_name		= "__ecb(aes)",
103085671860SHerbert Xu 			.cra_driver_name	= "__ecb-aes-aesni",
103185671860SHerbert Xu 			.cra_priority		= 400,
103285671860SHerbert Xu 			.cra_flags		= CRYPTO_ALG_INTERNAL,
1033fa46ccb8SJussi Kivilinna 			.cra_blocksize		= AES_BLOCK_SIZE,
103485671860SHerbert Xu 			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
1035fa46ccb8SJussi Kivilinna 			.cra_module		= THIS_MODULE,
103685671860SHerbert Xu 		},
1037fa46ccb8SJussi Kivilinna 		.min_keysize	= AES_MIN_KEY_SIZE,
1038fa46ccb8SJussi Kivilinna 		.max_keysize	= AES_MAX_KEY_SIZE,
103985671860SHerbert Xu 		.setkey		= aesni_skcipher_setkey,
1040fa46ccb8SJussi Kivilinna 		.encrypt	= ecb_encrypt,
1041fa46ccb8SJussi Kivilinna 		.decrypt	= ecb_decrypt,
1042fa46ccb8SJussi Kivilinna 	}, {
104385671860SHerbert Xu 		.base = {
104485671860SHerbert Xu 			.cra_name		= "__cbc(aes)",
104585671860SHerbert Xu 			.cra_driver_name	= "__cbc-aes-aesni",
104685671860SHerbert Xu 			.cra_priority		= 400,
104785671860SHerbert Xu 			.cra_flags		= CRYPTO_ALG_INTERNAL,
1048fa46ccb8SJussi Kivilinna 			.cra_blocksize		= AES_BLOCK_SIZE,
104985671860SHerbert Xu 			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
1050fa46ccb8SJussi Kivilinna 			.cra_module		= THIS_MODULE,
105185671860SHerbert Xu 		},
1052fa46ccb8SJussi Kivilinna 		.min_keysize	= AES_MIN_KEY_SIZE,
1053fa46ccb8SJussi Kivilinna 		.max_keysize	= AES_MAX_KEY_SIZE,
105485671860SHerbert Xu 		.ivsize		= AES_BLOCK_SIZE,
105585671860SHerbert Xu 		.setkey		= aesni_skcipher_setkey,
1056fa46ccb8SJussi Kivilinna 		.encrypt	= cbc_encrypt,
1057fa46ccb8SJussi Kivilinna 		.decrypt	= cbc_decrypt,
1058ddf169a9SArd Biesheuvel 	}, {
1059ddf169a9SArd Biesheuvel 		.base = {
1060ddf169a9SArd Biesheuvel 			.cra_name		= "__cts(cbc(aes))",
1061ddf169a9SArd Biesheuvel 			.cra_driver_name	= "__cts-cbc-aes-aesni",
1062ddf169a9SArd Biesheuvel 			.cra_priority		= 400,
1063ddf169a9SArd Biesheuvel 			.cra_flags		= CRYPTO_ALG_INTERNAL,
1064ddf169a9SArd Biesheuvel 			.cra_blocksize		= AES_BLOCK_SIZE,
1065ddf169a9SArd Biesheuvel 			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
1066ddf169a9SArd Biesheuvel 			.cra_module		= THIS_MODULE,
1067ddf169a9SArd Biesheuvel 		},
1068ddf169a9SArd Biesheuvel 		.min_keysize	= AES_MIN_KEY_SIZE,
1069ddf169a9SArd Biesheuvel 		.max_keysize	= AES_MAX_KEY_SIZE,
1070ddf169a9SArd Biesheuvel 		.ivsize		= AES_BLOCK_SIZE,
1071ddf169a9SArd Biesheuvel 		.walksize	= 2 * AES_BLOCK_SIZE,
1072ddf169a9SArd Biesheuvel 		.setkey		= aesni_skcipher_setkey,
1073ddf169a9SArd Biesheuvel 		.encrypt	= cts_cbc_encrypt,
1074ddf169a9SArd Biesheuvel 		.decrypt	= cts_cbc_decrypt,
1075fa46ccb8SJussi Kivilinna #ifdef CONFIG_X86_64
1076fa46ccb8SJussi Kivilinna 	}, {
107785671860SHerbert Xu 		.base = {
107885671860SHerbert Xu 			.cra_name		= "__ctr(aes)",
107985671860SHerbert Xu 			.cra_driver_name	= "__ctr-aes-aesni",
108085671860SHerbert Xu 			.cra_priority		= 400,
108185671860SHerbert Xu 			.cra_flags		= CRYPTO_ALG_INTERNAL,
1082fa46ccb8SJussi Kivilinna 			.cra_blocksize		= 1,
108385671860SHerbert Xu 			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
1084fa46ccb8SJussi Kivilinna 			.cra_module		= THIS_MODULE,
108585671860SHerbert Xu 		},
1086fa46ccb8SJussi Kivilinna 		.min_keysize	= AES_MIN_KEY_SIZE,
1087fa46ccb8SJussi Kivilinna 		.max_keysize	= AES_MAX_KEY_SIZE,
1088fa46ccb8SJussi Kivilinna 		.ivsize		= AES_BLOCK_SIZE,
108985671860SHerbert Xu 		.chunksize	= AES_BLOCK_SIZE,
109085671860SHerbert Xu 		.setkey		= aesni_skcipher_setkey,
1091fa46ccb8SJussi Kivilinna 		.encrypt	= ctr_crypt,
1092fa46ccb8SJussi Kivilinna 		.decrypt	= ctr_crypt,
10932481104fSArd Biesheuvel #endif
1094fa46ccb8SJussi Kivilinna 	}, {
109585671860SHerbert Xu 		.base = {
109685671860SHerbert Xu 			.cra_name		= "__xts(aes)",
109785671860SHerbert Xu 			.cra_driver_name	= "__xts-aes-aesni",
109885671860SHerbert Xu 			.cra_priority		= 401,
109985671860SHerbert Xu 			.cra_flags		= CRYPTO_ALG_INTERNAL,
1100fa46ccb8SJussi Kivilinna 			.cra_blocksize		= AES_BLOCK_SIZE,
110185671860SHerbert Xu 			.cra_ctxsize		= XTS_AES_CTX_SIZE,
1102fa46ccb8SJussi Kivilinna 			.cra_module		= THIS_MODULE,
1103fa46ccb8SJussi Kivilinna 		},
1104023af608SJussi Kivilinna 		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
1105023af608SJussi Kivilinna 		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
1106023af608SJussi Kivilinna 		.ivsize		= AES_BLOCK_SIZE,
11072481104fSArd Biesheuvel 		.walksize	= 2 * AES_BLOCK_SIZE,
1108023af608SJussi Kivilinna 		.setkey		= xts_aesni_setkey,
1109023af608SJussi Kivilinna 		.encrypt	= xts_encrypt,
1110023af608SJussi Kivilinna 		.decrypt	= xts_decrypt,
111185671860SHerbert Xu 	}
111285671860SHerbert Xu };
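/*
 * Editorial sketch (not built, not part of the driver): the "__" names
 * above are internal-only; simd_register_skciphers_compat() in
 * aesni_init() wraps each entry in a cryptd-backed simd algorithm whose
 * name drops the prefix (e.g. "xts(aes)" / "xts-aes-aesni").  The
 * fragment below shows, under those assumptions, how a kernel user might
 * reach the wrapped XTS implementation through the generic skcipher API;
 * the 512-byte sector size, key and scatterlist are placeholders and
 * error paths are abbreviated.  Needs <crypto/skcipher.h> and
 * <linux/scatterlist.h>.
 */
#if 0
static int example_xts_encrypt_sector(const u8 *key, unsigned int keylen,
				      struct scatterlist *sg,
				      u8 iv[AES_BLOCK_SIZE])
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* XTS keys are two AES keys of equal size, concatenated. */
	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, sg, sg, 512, iv);	/* in place */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}
#endif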
111385671860SHerbert Xu 
11141c9fa294SColin Ian King static
111585671860SHerbert Xu struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];
111685671860SHerbert Xu 
1117af05b300SHerbert Xu #ifdef CONFIG_X86_64
1118fd94fcf0SNathan Huckleberry /*
1119fd94fcf0SNathan Huckleberry  * XCTR does not have a non-AVX implementation, so it must be enabled
1120fd94fcf0SNathan Huckleberry  * conditionally.
1121fd94fcf0SNathan Huckleberry  */
1122fd94fcf0SNathan Huckleberry static struct skcipher_alg aesni_xctr = {
1123fd94fcf0SNathan Huckleberry 	.base = {
1124fd94fcf0SNathan Huckleberry 		.cra_name		= "__xctr(aes)",
1125fd94fcf0SNathan Huckleberry 		.cra_driver_name	= "__xctr-aes-aesni",
1126fd94fcf0SNathan Huckleberry 		.cra_priority		= 400,
1127fd94fcf0SNathan Huckleberry 		.cra_flags		= CRYPTO_ALG_INTERNAL,
1128fd94fcf0SNathan Huckleberry 		.cra_blocksize		= 1,
1129fd94fcf0SNathan Huckleberry 		.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
1130fd94fcf0SNathan Huckleberry 		.cra_module		= THIS_MODULE,
1131fd94fcf0SNathan Huckleberry 	},
1132fd94fcf0SNathan Huckleberry 	.min_keysize	= AES_MIN_KEY_SIZE,
1133fd94fcf0SNathan Huckleberry 	.max_keysize	= AES_MAX_KEY_SIZE,
1134fd94fcf0SNathan Huckleberry 	.ivsize		= AES_BLOCK_SIZE,
1135fd94fcf0SNathan Huckleberry 	.chunksize	= AES_BLOCK_SIZE,
1136fd94fcf0SNathan Huckleberry 	.setkey		= aesni_skcipher_setkey,
1137fd94fcf0SNathan Huckleberry 	.encrypt	= xctr_crypt,
1138fd94fcf0SNathan Huckleberry 	.decrypt	= xctr_crypt,
1139fd94fcf0SNathan Huckleberry };
1140fd94fcf0SNathan Huckleberry 
1141fd94fcf0SNathan Huckleberry static struct simd_skcipher_alg *aesni_simd_xctr;
1142fd94fcf0SNathan Huckleberry #endif /* CONFIG_X86_64 */
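/*
 * Editorial note: XCTR is the little-endian CTR variant used by the HCTR2
 * mode of operation; only the AVX "by8" assembly implements it, which is
 * why aesni_init() registers this algorithm conditionally on
 * X86_FEATURE_AVX (see the simd_register_skciphers_compat() call below).
 */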
1143fd94fcf0SNathan Huckleberry 
1144fd94fcf0SNathan Huckleberry #ifdef CONFIG_X86_64
1145cce2ea8dSSabrina Dubroca static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
1146cce2ea8dSSabrina Dubroca 				  unsigned int key_len)
1147cce2ea8dSSabrina Dubroca {
1148cce2ea8dSSabrina Dubroca 	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);
1149cce2ea8dSSabrina Dubroca 
1150*28b77609SEric Biggers 	return aes_set_key_common(&ctx->aes_key_expanded, key, key_len) ?:
1151cce2ea8dSSabrina Dubroca 	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
1152cce2ea8dSSabrina Dubroca }
1153cce2ea8dSSabrina Dubroca 
1154cce2ea8dSSabrina Dubroca static int generic_gcmaes_encrypt(struct aead_request *req)
1155cce2ea8dSSabrina Dubroca {
1156cce2ea8dSSabrina Dubroca 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1157cce2ea8dSSabrina Dubroca 	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
1158cce2ea8dSSabrina Dubroca 	void *aes_ctx = &(ctx->aes_key_expanded);
1159a13ed1d1SArd Biesheuvel 	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
1160a13ed1d1SArd Biesheuvel 	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
1161cce2ea8dSSabrina Dubroca 	__be32 counter = cpu_to_be32(1);
1162cce2ea8dSSabrina Dubroca 
1163cce2ea8dSSabrina Dubroca 	memcpy(iv, req->iv, 12);
1164cce2ea8dSSabrina Dubroca 	*((__be32 *)(iv+12)) = counter;
1165cce2ea8dSSabrina Dubroca 
1166cce2ea8dSSabrina Dubroca 	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
1167cce2ea8dSSabrina Dubroca 			      aes_ctx);
1168cce2ea8dSSabrina Dubroca }
1169cce2ea8dSSabrina Dubroca 
1170cce2ea8dSSabrina Dubroca static int generic_gcmaes_decrypt(struct aead_request *req)
1171cce2ea8dSSabrina Dubroca {
1172cce2ea8dSSabrina Dubroca 	__be32 counter = cpu_to_be32(1);
1173cce2ea8dSSabrina Dubroca 	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1174106840c4SSabrina Dubroca 	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
1175cce2ea8dSSabrina Dubroca 	void *aes_ctx = &(ctx->aes_key_expanded);
1176a13ed1d1SArd Biesheuvel 	u8 ivbuf[16 + (AESNI_ALIGN - 8)] __aligned(8);
1177a13ed1d1SArd Biesheuvel 	u8 *iv = PTR_ALIGN(&ivbuf[0], AESNI_ALIGN);
1178cce2ea8dSSabrina Dubroca 
1179cce2ea8dSSabrina Dubroca 	memcpy(iv, req->iv, 12);
1180cce2ea8dSSabrina Dubroca 	*((__be32 *)(iv+12)) = counter;
1181cce2ea8dSSabrina Dubroca 
1182cce2ea8dSSabrina Dubroca 	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
1183cce2ea8dSSabrina Dubroca 			      aes_ctx);
1184cce2ea8dSSabrina Dubroca }
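/*
 * Editorial note: both helpers above build the GCM pre-counter block J0
 * for the 96-bit IV case, which NIST SP 800-38D defines as IV || 0^31 || 1;
 * hence the 12 copied nonce bytes followed by a big-endian 1 in the last
 * four bytes of the 16-byte, AESNI_ALIGN-ed buffer handed to
 * gcmaes_encrypt()/gcmaes_decrypt().
 */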
1185cce2ea8dSSabrina Dubroca 
1186149e1225SEric Biggers static struct aead_alg aesni_aeads[] = { {
1187b7c89d9eSHerbert Xu 	.setkey			= common_rfc4106_set_key,
1188b7c89d9eSHerbert Xu 	.setauthsize		= common_rfc4106_set_authsize,
1189b7c89d9eSHerbert Xu 	.encrypt		= helper_rfc4106_encrypt,
1190b7c89d9eSHerbert Xu 	.decrypt		= helper_rfc4106_decrypt,
119146d93748SCorentin LABBE 	.ivsize			= GCM_RFC4106_IV_SIZE,
1192b7c89d9eSHerbert Xu 	.maxauthsize		= 16,
1193b7c89d9eSHerbert Xu 	.base = {
1194149e1225SEric Biggers 		.cra_name		= "__rfc4106(gcm(aes))",
1195149e1225SEric Biggers 		.cra_driver_name	= "__rfc4106-gcm-aesni",
1196149e1225SEric Biggers 		.cra_priority		= 400,
1197b7c89d9eSHerbert Xu 		.cra_flags		= CRYPTO_ALG_INTERNAL,
1198b7c89d9eSHerbert Xu 		.cra_blocksize		= 1,
1199b7c89d9eSHerbert Xu 		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
1200d480a26bSJakub Kicinski 		.cra_alignmask		= 0,
1201b7c89d9eSHerbert Xu 		.cra_module		= THIS_MODULE,
1202b7c89d9eSHerbert Xu 	},
1203b7c89d9eSHerbert Xu }, {
1204cce2ea8dSSabrina Dubroca 	.setkey			= generic_gcmaes_set_key,
1205cce2ea8dSSabrina Dubroca 	.setauthsize		= generic_gcmaes_set_authsize,
1206cce2ea8dSSabrina Dubroca 	.encrypt		= generic_gcmaes_encrypt,
1207cce2ea8dSSabrina Dubroca 	.decrypt		= generic_gcmaes_decrypt,
120846d93748SCorentin LABBE 	.ivsize			= GCM_AES_IV_SIZE,
1209cce2ea8dSSabrina Dubroca 	.maxauthsize		= 16,
1210cce2ea8dSSabrina Dubroca 	.base = {
1211149e1225SEric Biggers 		.cra_name		= "__gcm(aes)",
1212149e1225SEric Biggers 		.cra_driver_name	= "__generic-gcm-aesni",
1213149e1225SEric Biggers 		.cra_priority		= 400,
1214fc8517bfSSabrina Dubroca 		.cra_flags		= CRYPTO_ALG_INTERNAL,
1215fc8517bfSSabrina Dubroca 		.cra_blocksize		= 1,
1216fc8517bfSSabrina Dubroca 		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
1217d480a26bSJakub Kicinski 		.cra_alignmask		= 0,
1218fc8517bfSSabrina Dubroca 		.cra_module		= THIS_MODULE,
1219fc8517bfSSabrina Dubroca 	},
1220af05b300SHerbert Xu } };
1221af05b300SHerbert Xu #else
1222149e1225SEric Biggers static struct aead_alg aesni_aeads[0];
1223af05b300SHerbert Xu #endif
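/*
 * Editorial sketch (not built, not part of the driver): as with the
 * skciphers, simd_register_aeads_compat() exposes these internal AEADs
 * under the unprefixed names "rfc4106(gcm(aes))" and "gcm(aes)".  The
 * fragment below sketches, under that assumption, one "gcm(aes)"
 * encryption through the generic AEAD API; buffers, lengths and the
 * 12-byte IV are placeholders and error handling is abbreviated.
 * Needs <crypto/aead.h> and <linux/scatterlist.h>.
 */
#if 0
static int example_gcm_encrypt(const u8 *key, unsigned int keylen,
			       struct scatterlist *sg, unsigned int assoclen,
			       unsigned int ptlen, u8 iv[GCM_AES_IV_SIZE])
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen) ?:
	      crypto_aead_setauthsize(tfm, 16);		/* full 128-bit tag */
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* sg layout: assoclen AAD bytes, then ptlen plaintext plus tag room. */
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, assoclen);
	aead_request_set_crypt(req, sg, sg, ptlen, iv);
	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}
#endif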
1224af05b300SHerbert Xu 
1225149e1225SEric Biggers static struct simd_aead_alg *aesni_simd_aeads[ARRAY_SIZE(aesni_aeads)];
12263bd391f0SAndi Kleen 
12273bd391f0SAndi Kleen static const struct x86_cpu_id aesni_cpu_id[] = {
1228f30cfacaSThomas Gleixner 	X86_MATCH_FEATURE(X86_FEATURE_AES, NULL),
12293bd391f0SAndi Kleen 	{}
12303bd391f0SAndi Kleen };
12313bd391f0SAndi Kleen MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
12323bd391f0SAndi Kleen 
123354b6a1bdSHuang Ying static int __init aesni_init(void)
123454b6a1bdSHuang Ying {
12357af6c245SJussi Kivilinna 	int err;
123654b6a1bdSHuang Ying 
12373bd391f0SAndi Kleen 	if (!x86_match_cpu(aesni_cpu_id))
123854b6a1bdSHuang Ying 		return -ENODEV;
12398610d7bfSAndy Shevchenko #ifdef CONFIG_X86_64
1240d764593aSTim Chen 	if (boot_cpu_has(X86_FEATURE_AVX2)) {
1241d764593aSTim Chen 		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
1242d6cbf4eaSArd Biesheuvel 		static_branch_enable(&gcm_use_avx);
1243d6cbf4eaSArd Biesheuvel 		static_branch_enable(&gcm_use_avx2);
1244d764593aSTim Chen 	} else
1245d764593aSTim Chen 	if (boot_cpu_has(X86_FEATURE_AVX)) {
1246d764593aSTim Chen 		pr_info("AVX version of gcm_enc/dec engaged.\n");
1247d6cbf4eaSArd Biesheuvel 		static_branch_enable(&gcm_use_avx);
124842251572SMasahiro Yamada 	} else {
1249d764593aSTim Chen 		pr_info("SSE version of gcm_enc/dec engaged.\n");
1250d764593aSTim Chen 	}
1251da154e82SBorislav Petkov 	if (boot_cpu_has(X86_FEATURE_AVX)) {
125222cddcc7Schandramouli narayanan 		/* optimize performance of ctr mode encryption transform */
125364a49b85SArd Biesheuvel 		static_call_update(aesni_ctr_enc_tfm, aesni_ctr_enc_avx_tfm);
125422cddcc7Schandramouli narayanan 		pr_info("AES CTR mode by8 optimization enabled\n");
125522cddcc7Schandramouli narayanan 	}
1256fd94fcf0SNathan Huckleberry #endif /* CONFIG_X86_64 */
12570bd82f5fSTadeusz Struk 
125807269559SEric Biggers 	err = crypto_register_alg(&aesni_cipher_alg);
1259af05b300SHerbert Xu 	if (err)
1260e0db9c48SEric Biggers 		return err;
1261af05b300SHerbert Xu 
12628b56d348SEric Biggers 	err = simd_register_skciphers_compat(aesni_skciphers,
12638b56d348SEric Biggers 					     ARRAY_SIZE(aesni_skciphers),
12648b56d348SEric Biggers 					     aesni_simd_skciphers);
1265af05b300SHerbert Xu 	if (err)
126607269559SEric Biggers 		goto unregister_cipher;
1267af05b300SHerbert Xu 
1268149e1225SEric Biggers 	err = simd_register_aeads_compat(aesni_aeads, ARRAY_SIZE(aesni_aeads),
1269149e1225SEric Biggers 					 aesni_simd_aeads);
127085671860SHerbert Xu 	if (err)
127185671860SHerbert Xu 		goto unregister_skciphers;
1272af05b300SHerbert Xu 
1273fd94fcf0SNathan Huckleberry #ifdef CONFIG_X86_64
1274fd94fcf0SNathan Huckleberry 	if (boot_cpu_has(X86_FEATURE_AVX))
1275fd94fcf0SNathan Huckleberry 		err = simd_register_skciphers_compat(&aesni_xctr, 1,
1276fd94fcf0SNathan Huckleberry 						     &aesni_simd_xctr);
1277fd94fcf0SNathan Huckleberry 	if (err)
1278fd94fcf0SNathan Huckleberry 		goto unregister_aeads;
1279fd94fcf0SNathan Huckleberry #endif /* CONFIG_X86_64 */
1280fd94fcf0SNathan Huckleberry 
128185671860SHerbert Xu 	return 0;
128285671860SHerbert Xu 
1283fd94fcf0SNathan Huckleberry #ifdef CONFIG_X86_64
1284fd94fcf0SNathan Huckleberry unregister_aeads:
1285fd94fcf0SNathan Huckleberry 	simd_unregister_aeads(aesni_aeads, ARRAY_SIZE(aesni_aeads),
1286fd94fcf0SNathan Huckleberry 				aesni_simd_aeads);
1287fd94fcf0SNathan Huckleberry #endif /* CONFIG_X86_64 */
1288fd94fcf0SNathan Huckleberry 
128985671860SHerbert Xu unregister_skciphers:
12908b56d348SEric Biggers 	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
12918b56d348SEric Biggers 				  aesni_simd_skciphers);
129207269559SEric Biggers unregister_cipher:
129307269559SEric Biggers 	crypto_unregister_alg(&aesni_cipher_alg);
1294af05b300SHerbert Xu 	return err;
129554b6a1bdSHuang Ying }
129654b6a1bdSHuang Ying 
129754b6a1bdSHuang Ying static void __exit aesni_exit(void)
129854b6a1bdSHuang Ying {
1299149e1225SEric Biggers 	simd_unregister_aeads(aesni_aeads, ARRAY_SIZE(aesni_aeads),
1300149e1225SEric Biggers 			      aesni_simd_aeads);
13018b56d348SEric Biggers 	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
13028b56d348SEric Biggers 				  aesni_simd_skciphers);
130307269559SEric Biggers 	crypto_unregister_alg(&aesni_cipher_alg);
1304fd94fcf0SNathan Huckleberry #ifdef CONFIG_X86_64
1305fd94fcf0SNathan Huckleberry 	if (boot_cpu_has(X86_FEATURE_AVX))
1306fd94fcf0SNathan Huckleberry 		simd_unregister_skciphers(&aesni_xctr, 1, &aesni_simd_xctr);
1307fd94fcf0SNathan Huckleberry #endif /* CONFIG_X86_64 */
130854b6a1bdSHuang Ying }
130954b6a1bdSHuang Ying 
13100fbafd06STadeusz Struk late_initcall(aesni_init);
131154b6a1bdSHuang Ying module_exit(aesni_exit);
131254b6a1bdSHuang Ying 
131354b6a1bdSHuang Ying MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
131454b6a1bdSHuang Ying MODULE_LICENSE("GPL");
13155d26a105SKees Cook MODULE_ALIAS_CRYPTO("aes");
1316