1 /*
2  * Glue Code for AVX assembler versions of Serpent Cipher
3  *
4  * Copyright (C) 2012 Johannes Goetzfried
5  *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
6  *
7  * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
8  *
9  * This program is free software; you can redistribute it and/or modify
10  * it under the terms of the GNU General Public License as published by
11  * the Free Software Foundation; either version 2 of the License, or
12  * (at your option) any later version.
13  *
14  * This program is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
17  * GNU General Public License for more details.
18  *
19  * You should have received a copy of the GNU General Public License
20  * along with this program; if not, write to the Free Software
21  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
22  * USA
23  *
24  */
25 
26 #include <linux/module.h>
27 #include <linux/types.h>
28 #include <linux/crypto.h>
29 #include <linux/err.h>
30 #include <crypto/algapi.h>
31 #include <crypto/internal/simd.h>
32 #include <crypto/serpent.h>
33 #include <crypto/xts.h>
34 #include <asm/crypto/glue_helper.h>
35 #include <asm/crypto/serpent-avx.h>
36 
/* 8-way parallel cipher functions */

/* ECB: encrypt/decrypt 8 blocks in parallel with AVX. */
asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);

asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);

/* CBC decryption only: CBC encryption is inherently serial. */
asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);

/* CTR: @iv holds the running counter, advanced by the assembler code. */
asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);

/* XTS: @iv holds the tweak for the first of the 8 blocks. */
asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);

asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);
61 
62 void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
63 {
64 	be128 ctrblk;
65 
66 	le128_to_be128(&ctrblk, iv);
67 	le128_inc(iv);
68 
69 	__serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
70 	u128_xor(dst, src, (u128 *)&ctrblk);
71 }
72 EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);
73 
/*
 * Scalar XTS fallback: encrypt one 128-bit block, with the glue helper
 * applying (and advancing) the tweak in @iv around __serpent_encrypt.
 */
void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_encrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_enc);
80 
/*
 * Scalar XTS fallback: decrypt one 128-bit block, with the glue helper
 * applying (and advancing) the tweak in @iv around __serpent_decrypt.
 */
void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_decrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_dec);
87 
/* skcipher setkey wrapper: expand @key into the tfm's serpent_ctx. */
static int serpent_setkey_skcipher(struct crypto_skcipher *tfm,
				   const u8 *key, unsigned int keylen)
{
	return __serpent_setkey(crypto_skcipher_ctx(tfm), key, keylen);
}
93 
94 int xts_serpent_setkey(struct crypto_skcipher *tfm, const u8 *key,
95 		       unsigned int keylen)
96 {
97 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
98 	int err;
99 
100 	err = xts_verify_key(tfm, key, keylen);
101 	if (err)
102 		return err;
103 
104 	/* first half of xts-key is for crypt */
105 	err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
106 	if (err)
107 		return err;
108 
109 	/* second half of xts-key is for tweak */
110 	return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
111 }
112 EXPORT_SYMBOL_GPL(xts_serpent_setkey);
113 
/*
 * ECB encrypt dispatch: 8-way AVX path while at least
 * SERPENT_PARALLEL_BLOCKS blocks remain, scalar path for the tail.
 */
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};
126 
/* CTR dispatch: 8-way AVX path, scalar __serpent_crypt_ctr for the tail. */
static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
	} }
};
139 
/* XTS encrypt dispatch: 8-way AVX path, scalar serpent_xts_enc for the tail. */
static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
	} }
};
152 
/* ECB decrypt dispatch: 8-way AVX path, scalar fallback for the tail. */
static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};
165 
/*
 * CBC decrypt dispatch: decryption can run 8 blocks in parallel
 * (unlike CBC encryption, which is serial), scalar fallback for the tail.
 */
static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};
178 
/* XTS decrypt dispatch: 8-way AVX path, scalar serpent_xts_dec for the tail. */
static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
	} }
};
191 
/* skcipher .encrypt hook for ECB: walk the request via the glue helper. */
static int ecb_encrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&serpent_enc, req);
}
196 
/* skcipher .decrypt hook for ECB: walk the request via the glue helper. */
static int ecb_decrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&serpent_dec, req);
}
201 
/*
 * skcipher .encrypt hook for CBC: CBC encryption chains blocks and
 * cannot be parallelized, so only the scalar cipher is used.
 */
static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__serpent_encrypt),
					   req);
}
207 
/* skcipher .decrypt hook for CBC: parallel decryption via the glue table. */
static int cbc_decrypt(struct skcipher_request *req)
{
	return glue_cbc_decrypt_req_128bit(&serpent_dec_cbc, req);
}
212 
/* skcipher hook for CTR: encryption and decryption are the same operation. */
static int ctr_crypt(struct skcipher_request *req)
{
	return glue_ctr_req_128bit(&serpent_ctr, req);
}
217 
218 static int xts_encrypt(struct skcipher_request *req)
219 {
220 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
221 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
222 
223 	return glue_xts_req_128bit(&serpent_enc_xts, req,
224 				   XTS_TWEAK_CAST(__serpent_encrypt),
225 				   &ctx->tweak_ctx, &ctx->crypt_ctx);
226 }
227 
228 static int xts_decrypt(struct skcipher_request *req)
229 {
230 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
231 	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
232 
233 	return glue_xts_req_128bit(&serpent_dec_xts, req,
234 				   XTS_TWEAK_CAST(__serpent_encrypt),
235 				   &ctx->tweak_ctx, &ctx->crypt_ctx);
236 }
237 
/*
 * Internal ("__"-prefixed, CRYPTO_ALG_INTERNAL) skcipher algorithms.
 * They are wrapped by simd_register_skciphers_compat() below, which
 * exposes them as the public ecb/cbc/ctr/xts(serpent) implementations
 * with proper FPU-context handling.
 */
static struct skcipher_alg serpent_algs[] = {
	{
		.base.cra_name		= "__ecb(serpent)",
		.base.cra_driver_name	= "__ecb-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(serpent)",
		.base.cra_driver_name	= "__cbc-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		/* CTR is a stream mode: blocksize 1, chunksize = cipher block. */
		.base.cra_name		= "__ctr(serpent)",
		.base.cra_driver_name	= "__ctr-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.chunksize		= SERPENT_BLOCK_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}, {
		/* XTS takes a double-length key (data key + tweak key). */
		.base.cra_name		= "__xts(serpent)",
		.base.cra_driver_name	= "__xts-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_xts_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= 2 * SERPENT_MIN_KEY_SIZE,
		.max_keysize		= 2 * SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.setkey			= xts_serpent_setkey,
		.encrypt		= xts_encrypt,
		.decrypt		= xts_decrypt,
	},
};
297 
298 static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];
299 
/*
 * Module init: verify the kernel can save/restore the SSE and YMM
 * register state required by the AVX assembler, then register the
 * internal algorithms through their SIMD wrappers.
 */
static int __init serpent_init(void)
{
	const char *feature_name;

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
				&feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(serpent_algs,
					      ARRAY_SIZE(serpent_algs),
					      serpent_simd_algs);
}
314 
/* Module exit: unregister both the SIMD wrappers and the internal algs. */
static void __exit serpent_exit(void)
{
	simd_unregister_skciphers(serpent_algs, ARRAY_SIZE(serpent_algs),
				  serpent_simd_algs);
}
320 
321 module_init(serpent_init);
322 module_exit(serpent_exit);
323 
324 MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
325 MODULE_LICENSE("GPL");
326 MODULE_ALIAS_CRYPTO("serpent");
327