/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *	<Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/glue_helper.h>

#define CAST6_PARALLEL_BLOCKS 8

asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);

asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
			       le128 *iv);

asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);
asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);

static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__cast6_encrypt));
}

static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__cast6_decrypt));
}

static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}
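/*
 * Dispatch tables for the glue_helper framework.  Each table lists the
 * available implementations in order of decreasing block count; the helper
 * uses the widest routine that still fits the data remaining in the walk
 * and falls back to the one-block C implementation for the tail.
 * fpu_blocks_limit tells the helper how many blocks must be pending before
 * saving the FPU/SIMD state with kernel_fpu_begin() is worthwhile.
 */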
static const struct common_glue_ctx cast6_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
	} }
};

static const struct common_glue_ctx cast6_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_ctr_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) }
	} }
};

static const struct common_glue_ctx cast6_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc) }
	} }
};

static const struct common_glue_ctx cast6_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
	} }
};

static const struct common_glue_ctx cast6_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_cbc_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) }
	} }
};

static const struct common_glue_ctx cast6_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec) }
	} }
};

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&cast6_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&cast6_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__cast6_encrypt), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&cast6_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes);
}

static inline bool cast6_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}

static inline void cast6_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}
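/*
 * State threaded through the lrw_crypt() callbacks below.  lrw_crypt()
 * handles the tweak arithmetic itself and hands the callbacks at most
 * tbuflen bytes at a time: a full batch of CAST6_PARALLEL_BLOCKS blocks
 * goes through the 8-way assembler routine, anything shorter falls back
 * to the one-block C implementation.  fpu_enabled is carried here so one
 * FPU section can span several callback invocations.
 */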
struct crypt_priv {
	struct cast6_ctx *ctx;
	bool fpu_enabled;
};

static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAST6_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
		cast6_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__cast6_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAST6_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
		cast6_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__cast6_decrypt(ctx->ctx, srcdst, srcdst);
}

struct cast6_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	struct cast6_ctx cast6_ctx;
};

static int lrw_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __cast6_setkey(&ctx->cast6_ctx, key, keylen - CAST6_BLOCK_SIZE,
			     &tfm->crt_flags);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen - CAST6_BLOCK_SIZE);
}

static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAST6_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->cast6_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	cast6_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAST6_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->cast6_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	cast6_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static void lrw_exit_tfm(struct crypto_tfm *tfm)
{
	struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}

struct cast6_xts_ctx {
	struct cast6_ctx tweak_ctx;
	struct cast6_ctx crypt_ctx;
};

static int xts_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cast6_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
			      flags);
}
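/*
 * The XTS tweak is always computed with the encryption direction of the
 * tweak key, which is why both xts_encrypt() and xts_decrypt() pass
 * XTS_TWEAK_CAST(__cast6_encrypt) to glue_xts_crypt_128bit(); only the
 * data-processing table (cast6_enc_xts vs. cast6_dec_xts) differs.
 */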
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&cast6_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__cast6_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&cast6_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__cast6_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
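/*
 * Ten algorithm registrations: the first five are the synchronous
 * "__"-prefixed blkcipher implementations, marked CRYPTO_ALG_INTERNAL at
 * priority 0 so they are only reachable through the cryptd-based wrappers.
 * The last five are the user-visible async ablkcipher variants built on
 * ablk_helper, registered at priority 200 so they are preferred over the
 * generic C implementation.  Note that ctr(cast6) uses ablk_encrypt for
 * both directions, since CTR decryption is the same keystream operation.
 */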
static struct crypto_alg cast6_algs[10] = { {
	.cra_name		= "__ecb-cast6-avx",
	.cra_driver_name	= "__driver-ecb-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.setkey		= cast6_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-cast6-avx",
	.cra_driver_name	= "__driver-cbc-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.setkey		= cast6_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-cast6-avx",
	.cra_driver_name	= "__driver-ctr-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct cast6_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= cast6_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-cast6-avx",
	.cra_driver_name	= "__driver-lrw-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= lrw_cast6_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-cast6-avx",
	.cra_driver_name	= "__driver-xts-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE * 2,
			.max_keysize	= CAST6_MAX_KEY_SIZE * 2,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= xts_cast6_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(cast6)",
	.cra_driver_name	= "ecb-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(cast6)",
	.cra_driver_name	= "cbc-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(cast6)",
	.cra_driver_name	= "ctr-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(cast6)",
	.cra_driver_name	= "lrw-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(cast6)",
	.cra_driver_name	= "xts-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE * 2,
			.max_keysize	= CAST6_MAX_KEY_SIZE * 2,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
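/*
 * AVX support in CPUID alone is not sufficient: the OSXSAVE check confirms
 * the OS has enabled XSAVE, and the xgetbv() read of XCR0 confirms that
 * both SSE and YMM register state are actually saved and restored across
 * context switches.  Without that, the AVX code paths must stay disabled.
 */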
static int __init cast6_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave) {
		pr_info("AVX instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

static void __exit cast6_exit(void)
{
	crypto_unregister_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast6");