// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for x86_64/AVX/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 */

#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>

#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16

/* 16-way parallel cipher functions (avx/aes-ni) */
asmlinkage void camellia_ecb_enc_16way(const void *ctx, u8 *dst, const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_enc_16way);

asmlinkage void camellia_ecb_dec_16way(const void *ctx, u8 *dst, const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_dec_16way);

asmlinkage void camellia_cbc_dec_16way(const void *ctx, u8 *dst, const u8 *src);
EXPORT_SYMBOL_GPL(camellia_cbc_dec_16way);

asmlinkage void camellia_ctr_16way(const void *ctx, u8 *dst, const u8 *src,
				   le128 *iv);
EXPORT_SYMBOL_GPL(camellia_ctr_16way);

asmlinkage void camellia_xts_enc_16way(const void *ctx, u8 *dst, const u8 *src,
				       le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_enc_16way);

asmlinkage void camellia_xts_dec_16way(const void *ctx, u8 *dst, const u8 *src,
				       le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_dec_16way);

void camellia_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, camellia_enc_blk);
}
EXPORT_SYMBOL_GPL(camellia_xts_enc);

void camellia_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, camellia_dec_blk);
}
EXPORT_SYMBOL_GPL(camellia_xts_dec);
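
/*
 * The dispatch tables below are consumed by the shared glue_helper code.
 * Entries are listed in order of decreasing .num_blocks: for each chunk of a
 * request, the helper uses the widest implementation that still fits the
 * remaining data, so bulk data goes through the 16-way AVX/AES-NI assembler
 * path while the tail falls back to the 2-way and 1-way implementations.
 *
 * A simplified sketch of how the glue_helper ECB path consumes one mapped
 * chunk of nbytes (illustrative only, not the exact code; the real helper
 * also walks the scatterlist and brackets the wide calls in
 * kernel_fpu_begin()/kernel_fpu_end() based on .fpu_blocks_limit):
 *
 *	for (i = 0; i < gctx->num_funcs; i++) {
 *		func_bytes = bsize * gctx->funcs[i].num_blocks;
 *		if (nbytes < func_bytes)
 *			continue;
 *		do {
 *			gctx->funcs[i].fn_u.ecb(ctx, dst, src);
 *			src += func_bytes;
 *			dst += func_bytes;
 *			nbytes -= func_bytes;
 *		} while (nbytes >= func_bytes);
 *		if (nbytes < bsize)
 *			break;
 *	}
 */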

static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = camellia_ecb_enc_16way }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = camellia_enc_blk_2way }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = camellia_enc_blk }
	} }
};

static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = camellia_ctr_16way }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = camellia_crypt_ctr_2way }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = camellia_crypt_ctr }
	} }
};

static const struct common_glue_ctx camellia_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = camellia_xts_enc_16way }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = camellia_xts_enc }
	} }
};

static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = camellia_ecb_dec_16way }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = camellia_dec_blk_2way }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = camellia_dec_blk }
	} }
};

static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = camellia_cbc_dec_16way }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = camellia_decrypt_cbc_2way }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = camellia_dec_blk }
	} }
};

static const struct common_glue_ctx camellia_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = camellia_xts_dec_16way }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = camellia_xts_dec }
	} }
};

static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
			   unsigned int keylen)
{
	return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen,
				 &tfm->base.crt_flags);
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&camellia_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&camellia_dec, req);
}

static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(camellia_enc_blk, req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
	return glue_cbc_decrypt_req_128bit(&camellia_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
	return glue_ctr_req_128bit(&camellia_ctr, req);
}

int xts_camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 *flags = &tfm->base.crt_flags;
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	/* first half of xts-key is for crypt */
	err = __camellia_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __camellia_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
				 flags);
}
EXPORT_SYMBOL_GPL(xts_camellia_setkey);

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&camellia_enc_xts, req, camellia_enc_blk,
				   &ctx->tweak_ctx, &ctx->crypt_ctx, false);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&camellia_dec_xts, req, camellia_enc_blk,
				   &ctx->tweak_ctx, &ctx->crypt_ctx, true);
}
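
/*
 * The algorithms below carry CRYPTO_ALG_INTERNAL and "__"-prefixed names, so
 * they are not directly reachable by normal crypto API users.
 * simd_register_skciphers_compat() in the init path wraps each of them in a
 * SIMD helper that exposes the usual "ecb(camellia)", "cbc(camellia)",
 * "ctr(camellia)" and "xts(camellia)" names and defers to cryptd when the
 * FPU cannot be used in the calling context.
 *
 * Note the XTS entry: it takes a double-length key, which
 * xts_camellia_setkey() above splits into the data and tweak halves stored
 * in struct camellia_xts_ctx.
 */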

static struct skcipher_alg camellia_algs[] = {
	{
		.base.cra_name		= "__ecb(camellia)",
		.base.cra_driver_name	= "__ecb-camellia-aesni",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.setkey			= camellia_setkey,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(camellia)",
		.base.cra_driver_name	= "__cbc-camellia-aesni",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.ivsize			= CAMELLIA_BLOCK_SIZE,
		.setkey			= camellia_setkey,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "__ctr(camellia)",
		.base.cra_driver_name	= "__ctr-camellia-aesni",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct camellia_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.ivsize			= CAMELLIA_BLOCK_SIZE,
		.chunksize		= CAMELLIA_BLOCK_SIZE,
		.setkey			= camellia_setkey,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}, {
		.base.cra_name		= "__xts(camellia)",
		.base.cra_driver_name	= "__xts-camellia-aesni",
		.base.cra_priority	= 400,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_xts_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= 2 * CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= 2 * CAMELLIA_MAX_KEY_SIZE,
		.ivsize			= CAMELLIA_BLOCK_SIZE,
		.setkey			= xts_camellia_setkey,
		.encrypt		= xts_encrypt,
		.decrypt		= xts_decrypt,
	},
};

static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];

static int __init camellia_aesni_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(camellia_algs,
					      ARRAY_SIZE(camellia_algs),
					      camellia_simd_algs);
}

static void __exit camellia_aesni_fini(void)
{
	simd_unregister_skciphers(camellia_algs, ARRAY_SIZE(camellia_algs),
				  camellia_simd_algs);
}

module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX optimized");
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");
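
/*
 * Example (illustrative only, not part of this module): once this module is
 * loaded, kernel code can reach the accelerated cipher through the generic
 * skcipher API by name. The snippet below is a minimal synchronous sketch
 * assuming a contiguous buffer "buf" of "buflen" bytes and a 32-byte key;
 * error handling is abbreviated.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct scatterlist sg;
 *	u8 iv[CAMELLIA_BLOCK_SIZE];
 *
 *	tfm = crypto_alloc_skcipher("ctr(camellia)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, 32);	/- Camellia-256 key -/
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, buflen);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */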