/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Glue Code for the AVX/AES-NI/GFNI assembler implementation of the ARIA Cipher
 *
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 */

#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/aria.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#include "ecb_cbc_helpers.h"
#include "aria-avx.h"

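/*
 * Assembler entry points. Each routine processes 16 ARIA blocks per
 * call. They are exported (EXPORT_SYMBOL_GPL) so that the wider SIMD
 * ARIA implementations can reuse these 16-way routines for tails.
 */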
asmlinkage void aria_aesni_avx_encrypt_16way(const void *ctx, u8 *dst,
					     const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_encrypt_16way);
asmlinkage void aria_aesni_avx_decrypt_16way(const void *ctx, u8 *dst,
					     const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_decrypt_16way);
asmlinkage void aria_aesni_avx_ctr_crypt_16way(const void *ctx, u8 *dst,
					       const u8 *src,
					       u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx_ctr_crypt_16way);
asmlinkage void aria_aesni_avx_gfni_encrypt_16way(const void *ctx, u8 *dst,
						  const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_encrypt_16way);
asmlinkage void aria_aesni_avx_gfni_decrypt_16way(const void *ctx, u8 *dst,
						  const u8 *src);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_decrypt_16way);
asmlinkage void aria_aesni_avx_gfni_ctr_crypt_16way(const void *ctx, u8 *dst,
						    const u8 *src,
						    u8 *keystream, u8 *iv);
EXPORT_SYMBOL_GPL(aria_aesni_avx_gfni_ctr_crypt_16way);

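/*
 * Dispatch table, filled in at module init with either the plain
 * AES-NI/AVX routines or the GFNI variants, depending on CPU features.
 */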
static struct aria_avx_ops aria_ops;

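/*
 * Per-request scratch buffer for CTR keystream material. Keeping it in
 * the request context rather than in the shared tfm context keeps
 * concurrent requests on the same tfm from stepping on each other.
 */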
struct aria_avx_request_ctx {
	u8 keystream[ARIA_AESNI_PARALLEL_BLOCK_SIZE];
};

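/*
 * ECB helpers: walk the request in 16-block chunks with the SIMD
 * implementation, then finish any remaining blocks one at a time with
 * the generic C cipher.
 */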
static int ecb_do_encrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_encrypt_16way);
	ECB_BLOCK(1, aria_encrypt);
	ECB_WALK_END();
}

static int ecb_do_decrypt(struct skcipher_request *req, const u32 *rkey)
{
	ECB_WALK_START(req, ARIA_BLOCK_SIZE, ARIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(ARIA_AESNI_PARALLEL_BLOCKS, aria_ops.aria_decrypt_16way);
	ECB_BLOCK(1, aria_decrypt);
	ECB_WALK_END();
}

static int aria_avx_ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_encrypt(req, ctx->enc_key[0]);
}

static int aria_avx_ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);

	return ecb_do_decrypt(req, ctx->dec_key[0]);
}

static int aria_avx_set_key(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	return aria_set_key(&tfm->base, key, keylen);
}

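/*
 * CTR mode (also used for decryption, since CTR is symmetric). Each walk
 * step is handled in three stages: 16-way SIMD for full parallel chunks,
 * the generic cipher for remaining whole blocks, and a final partial
 * block on the last step.
 */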
static int aria_avx_ctr_encrypt(struct skcipher_request *req)
{
	struct aria_avx_request_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aria_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

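		/*
		 * Bulk path: 16 blocks per call, holding the FPU only
		 * around the assembler routine.
		 */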
		while (nbytes >= ARIA_AESNI_PARALLEL_BLOCK_SIZE) {
			kernel_fpu_begin();
			aria_ops.aria_ctr_crypt_16way(ctx, dst, src,
						      &req_ctx->keystream[0],
						      walk.iv);
			kernel_fpu_end();
			dst += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			src += ARIA_AESNI_PARALLEL_BLOCK_SIZE;
			nbytes -= ARIA_AESNI_PARALLEL_BLOCK_SIZE;
		}

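		/*
		 * Remaining whole blocks: encrypt the counter with the
		 * generic C cipher and XOR the keystream into the data.
		 */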
		while (nbytes >= ARIA_BLOCK_SIZE) {
			memcpy(&req_ctx->keystream[0], walk.iv, ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       ARIA_BLOCK_SIZE);
			dst += ARIA_BLOCK_SIZE;
			src += ARIA_BLOCK_SIZE;
			nbytes -= ARIA_BLOCK_SIZE;
		}

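		/* Final partial block, only on the last walk step. */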
		if (walk.nbytes == walk.total && nbytes > 0) {
			memcpy(&req_ctx->keystream[0], walk.iv,
			       ARIA_BLOCK_SIZE);
			crypto_inc(walk.iv, ARIA_BLOCK_SIZE);

			aria_encrypt(ctx, &req_ctx->keystream[0],
				     &req_ctx->keystream[0]);

			crypto_xor_cpy(dst, src, &req_ctx->keystream[0],
				       nbytes);
			dst += nbytes;
			src += nbytes;
			nbytes = 0;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int aria_avx_init_tfm(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct aria_avx_request_ctx));

	return 0;
}

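/*
 * Both algorithms carry CRYPTO_ALG_INTERNAL and are registered through
 * the crypto SIMD helper, so the "__"-prefixed variants are never used
 * directly; the wrapper defers to cryptd when the FPU is not usable.
 */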
static struct skcipher_alg aria_algs[] = {
	{
		.base.cra_name		= "__ecb(aria)",
		.base.cra_driver_name	= "__ecb-aria-avx",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= ARIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.setkey			= aria_avx_set_key,
		.encrypt		= aria_avx_ecb_encrypt,
		.decrypt		= aria_avx_ecb_decrypt,
	}, {
		.base.cra_name		= "__ctr(aria)",
		.base.cra_driver_name	= "__ctr-aria-avx",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct aria_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= ARIA_MIN_KEY_SIZE,
		.max_keysize		= ARIA_MAX_KEY_SIZE,
		.ivsize			= ARIA_BLOCK_SIZE,
		.chunksize		= ARIA_BLOCK_SIZE,
		.walksize		= 16 * ARIA_BLOCK_SIZE,
		.setkey			= aria_avx_set_key,
		.encrypt		= aria_avx_ctr_encrypt,
		.decrypt		= aria_avx_ctr_encrypt,
		.init			= aria_avx_init_tfm,
	}
};

static struct simd_skcipher_alg *aria_simd_algs[ARRAY_SIZE(aria_algs)];

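/*
 * Module init: verify the required CPU and XSAVE features, pick the
 * best available 16-way implementation, then register the algorithms
 * through the SIMD helper.
 */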
static int __init aria_avx_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				&feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

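	/* Prefer the GFNI-based S-box routines when the CPU has them. */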
	if (boot_cpu_has(X86_FEATURE_GFNI)) {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_gfni_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_gfni_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_gfni_ctr_crypt_16way;
	} else {
		aria_ops.aria_encrypt_16way = aria_aesni_avx_encrypt_16way;
		aria_ops.aria_decrypt_16way = aria_aesni_avx_decrypt_16way;
		aria_ops.aria_ctr_crypt_16way = aria_aesni_avx_ctr_crypt_16way;
	}

	return simd_register_skciphers_compat(aria_algs,
					      ARRAY_SIZE(aria_algs),
					      aria_simd_algs);
}

static void __exit aria_avx_exit(void)
{
	simd_unregister_skciphers(aria_algs, ARRAY_SIZE(aria_algs),
				  aria_simd_algs);
}

module_init(aria_avx_init);
module_exit(aria_avx_exit);

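/*
 * A minimal usage sketch (not part of this module): kernel code would
 * normally reach these routines through the generic skcipher API, e.g.
 * by allocating "ctr(aria)", which resolves to the SIMD wrapper around
 * __ctr-aria-avx when this module is loaded. The key buffer below is a
 * hypothetical caller-supplied array:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("ctr(aria)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, ARIA_MIN_KEY_SIZE);
 *	... set up a struct skcipher_request with skcipher_request_alloc()
 *	and skcipher_request_set_crypt(), then call
 *	crypto_skcipher_encrypt() ...
 *	crypto_free_skcipher(tfm);
 */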
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_DESCRIPTION("ARIA Cipher Algorithm, AVX/AES-NI/GFNI optimized");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-aesni-avx");