/*
 * Glue Code for AVX assembler versions of Serpent Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/serpent.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/serpent-avx.h>
#include <asm/crypto/glue_helper.h>

/* 8-way parallel cipher functions */
asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);

asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);

asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);

asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);

asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);

asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);

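/*
 * Single-block CTR fallback: the glue layer keeps the 128-bit counter
 * little-endian, so convert it to big-endian for encryption, bump the
 * counter, and XOR the resulting keystream block into the source.
 */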
void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}
EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);

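/*
 * Single-block XTS helpers: glue_xts_crypt_128bit_one() XORs the tweak in,
 * runs one block through the generic C implementation, XORs the tweak back
 * out and advances the tweak for the next block.
 */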
void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_encrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_enc);

void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__serpent_decrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_dec);

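/*
 * Dispatch tables for the glue helper.  Entries must be listed in order of
 * decreasing num_blocks: the helper uses the widest variant that still fits
 * in the remaining data and falls back to the 1-block C implementation.
 * fpu_blocks_limit is the threshold below which the FPU is left untouched.
 */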
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};

static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
	} }
};

static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
	} }
};

static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};

static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
	} }
};

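/* blkcipher entry points: thin wrappers around the shared glue helpers */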
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}

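/*
 * Open an FPU section only when at least SERPENT_PARALLEL_BLOCKS worth of
 * data remains; kernel_fpu_begin()/end() is too costly for short inputs.
 */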
static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}

static inline void serpent_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

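/*
 * Per-request state handed to the lrw_crypt() callbacks below; fpu_enabled
 * tracks whether an FPU section is currently open, so that one section can
 * span several callback invocations.
 */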
struct crypt_priv {
	struct serpent_ctx *ctx;
	bool fpu_enabled;
};

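/*
 * LRW callbacks: a full 8-block chunk goes through the AVX implementation,
 * anything shorter is handled block by block in plain C.
 */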
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = SERPENT_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
		serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__serpent_decrypt(ctx->ctx, srcdst, srcdst);
}

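/*
 * The LRW key is the Serpent key with one extra block appended; the tail
 * SERPENT_BLOCK_SIZE bytes seed the GF(2^128) multiplication table.
 */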
int lrw_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __serpent_setkey(&ctx->serpent_ctx, key, keylen -
							SERPENT_BLOCK_SIZE);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen -
						SERPENT_BLOCK_SIZE);
}
EXPORT_SYMBOL_GPL(lrw_serpent_setkey);

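/*
 * Sleeping is not allowed while an FPU section is open (preemption is
 * disabled), so CRYPTO_TFM_REQ_MAY_SLEEP is cleared before walking the
 * scatterlists.
 */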
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[SERPENT_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->serpent_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	serpent_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

void lrw_serpent_exit_tfm(struct crypto_tfm *tfm)
{
	struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
EXPORT_SYMBOL_GPL(lrw_serpent_exit_tfm);

int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
		       unsigned int keylen)
{
	struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/*
	 * key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}
EXPORT_SYMBOL_GPL(xts_serpent_setkey);

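/*
 * Note that both directions pass __serpent_encrypt as the tweak function:
 * the XTS tweak is always computed with the encryption direction.
 */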
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__serpent_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

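/*
 * The first five algorithms are the internal, synchronous implementations
 * (CRYPTO_ALG_INTERNAL, priority 0); the last five are the async wrappers
 * exported to users, which defer to cryptd via the ablk helper whenever the
 * FPU is not usable in the calling context.
 */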
static struct crypto_alg serpent_algs[10] = { {
	.cra_name		= "__ecb-serpent-avx",
	.cra_driver_name	= "__driver-ecb-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-serpent-avx",
	.cra_driver_name	= "__driver-cbc-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-serpent-avx",
	.cra_driver_name	= "__driver-ctr-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-serpent-avx",
	.cra_driver_name	= "__driver-lrw-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_serpent_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-serpent-avx",
	.cra_driver_name	= "__driver-xts-serpent-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= xts_serpent_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
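	/* the exported, cryptd-backed async algorithms start here */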
	.cra_name		= "ecb(serpent)",
	.cra_driver_name	= "ecb-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(serpent)",
	.cra_driver_name	= "cbc-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(serpent)",
	.cra_driver_name	= "ctr-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(serpent)",
	.cra_driver_name	= "lrw-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(serpent)",
	.cra_driver_name	= "xts-serpent-avx",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
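
/*
 * A minimal usage sketch (illustrative, not part of this file): a kernel
 * user would allocate one of the exported algorithms by name and let the
 * crypto core select this driver when its priority (500) wins, e.g.:
 *
 *	struct crypto_ablkcipher *tfm =
 *		crypto_alloc_ablkcipher("xts(serpent)", 0, 0);
 */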

static int __init serpent_init(void)
{
	u64 xcr0;

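	/*
	 * AVX needs both CPU and OS support: OSXSAVE shows that the OS uses
	 * XSAVE, and the SSE and YMM state bits must be set in XCR0 before
	 * the AVX registers may be touched.
	 */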
	if (!cpu_has_avx || !cpu_has_osxsave) {
		pr_info("AVX instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

static void __exit serpent_exit(void)
{
	crypto_unregister_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

module_init(serpent_init);
module_exit(serpent_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("serpent");