// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for assembler optimized version of Blowfish
 *
 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/blowfish.h>
#include <crypto/internal/skcipher.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>

/* regular block cipher functions */
asmlinkage void __blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src,
				   bool xor);
asmlinkage void blowfish_dec_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src);

/* 4-way parallel cipher functions */
asmlinkage void __blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
					const u8 *src, bool xor);
asmlinkage void blowfish_dec_blk_4way(struct bf_ctx *ctx, u8 *dst,
				      const u8 *src);

static inline void blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src)
{
	__blowfish_enc_blk(ctx, dst, src, false);
}

static inline void blowfish_enc_blk_xor(struct bf_ctx *ctx, u8 *dst,
					const u8 *src)
{
	__blowfish_enc_blk(ctx, dst, src, true);
}

static inline void blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
					 const u8 *src)
{
	__blowfish_enc_blk_4way(ctx, dst, src, false);
}

static inline void blowfish_enc_blk_xor_4way(struct bf_ctx *ctx, u8 *dst,
					     const u8 *src)
{
	__blowfish_enc_blk_4way(ctx, dst, src, true);
}

static void blowfish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	blowfish_enc_blk(crypto_tfm_ctx(tfm), dst, src);
}

static void blowfish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	blowfish_dec_blk(crypto_tfm_ctx(tfm), dst, src);
}

static int blowfish_setkey_skcipher(struct crypto_skcipher *tfm,
				    const u8 *key, unsigned int keylen)
{
	return blowfish_setkey(&tfm->base, key, keylen);
}

static int ecb_crypt(struct skcipher_request *req,
		     void (*fn)(struct bf_ctx *, u8 *, const u8 *),
		     void (*fn_4way)(struct bf_ctx *, u8 *, const u8 *))
{
	unsigned int bsize = BF_BLOCK_SIZE;
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct bf_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		u8 *wsrc = walk.src.virt.addr;
		u8 *wdst = walk.dst.virt.addr;

		/* Process four block batch */
		if (nbytes >= bsize * 4) {
			do {
				fn_4way(ctx, wdst, wsrc);

				wsrc += bsize * 4;
				wdst += bsize * 4;
				nbytes -= bsize * 4;
			} while (nbytes >= bsize * 4);

			if (nbytes < bsize)
				goto done;
		}

		/* Handle leftovers */
		do {
			fn(ctx, wdst, wsrc);

			wsrc += bsize;
			wdst += bsize;
			nbytes -= bsize;
		} while (nbytes >= bsize);

done:
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return ecb_crypt(req, blowfish_enc_blk, blowfish_enc_blk_4way);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return ecb_crypt(req, blowfish_dec_blk, blowfish_dec_blk_4way);
}
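
/*
 * CBC encryption is inherently serial: each ciphertext block feeds into the
 * next block's input, i.e. C_i = E_K(P_i XOR C_{i-1}) with C_0 = IV, so the
 * 4-way routines cannot help here and only the one-block cipher is used.
 * The XOR into *dst followed by in-place encryption of dst implements the
 * formula directly; 'iv' simply chases 'dst' through the walk.
 */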
static unsigned int __cbc_encrypt(struct bf_ctx *ctx,
				  struct skcipher_walk *walk)
{
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 *iv = (u64 *)walk->iv;

	do {
		*dst = *src ^ *iv;
		blowfish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
		iv = dst;

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

	*(u64 *)walk->iv = *iv;
	return nbytes;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct bf_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_encrypt(ctx, &walk);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
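
/*
 * CBC decryption does parallelize: every ciphertext block is available up
 * front, so P_i = D_K(C_i) XOR C_{i-1} can decrypt four blocks at once and
 * apply the XOR chaining afterwards.  The walk runs backwards from the last
 * block so that in-place operation (dst == src) stays safe: decrypting block
 * i in a forward pass would overwrite the ciphertext that block i+1 still
 * needs as its chaining value.
 */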
static unsigned int __cbc_decrypt(struct bf_ctx *ctx,
				  struct skcipher_walk *walk)
{
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 ivs[4 - 1];
	u64 last_iv;

	/* Start of the last block. */
	src += nbytes / bsize - 1;
	dst += nbytes / bsize - 1;

	last_iv = *src;

	/* Process four block batch */
	if (nbytes >= bsize * 4) {
		do {
			nbytes -= bsize * 4 - bsize;
			src -= 4 - 1;
			dst -= 4 - 1;

			ivs[0] = src[0];
			ivs[1] = src[1];
			ivs[2] = src[2];

			blowfish_dec_blk_4way(ctx, (u8 *)dst, (u8 *)src);

			dst[1] ^= ivs[0];
			dst[2] ^= ivs[1];
			dst[3] ^= ivs[2];

			nbytes -= bsize;
			if (nbytes < bsize)
				goto done;

			*dst ^= *(src - 1);
			src -= 1;
			dst -= 1;
		} while (nbytes >= bsize * 4);
	}

	/* Handle leftovers */
	for (;;) {
		blowfish_dec_blk(ctx, (u8 *)dst, (u8 *)src);

		nbytes -= bsize;
		if (nbytes < bsize)
			break;

		*dst ^= *(src - 1);
		src -= 1;
		dst -= 1;
	}

done:
	*dst ^= *(u64 *)walk->iv;
	*(u64 *)walk->iv = last_iv;

	return nbytes;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct bf_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_decrypt(ctx, &walk);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

/*
 * Registration: the bare "blowfish" cipher goes in at priority 200 and the
 * assembler ECB/CBC skciphers at 300, so both are preferred over the
 * lower-priority generic C implementation when this module is loaded.
 */
static struct crypto_alg bf_cipher_alg = {
	.cra_name		= "blowfish",
	.cra_driver_name	= "blowfish-asm",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= BF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize	= BF_MIN_KEY_SIZE,
			.cia_max_keysize	= BF_MAX_KEY_SIZE,
			.cia_setkey		= blowfish_setkey,
			.cia_encrypt		= blowfish_encrypt,
			.cia_decrypt		= blowfish_decrypt,
		}
	}
};

static struct skcipher_alg bf_skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(blowfish)",
		.base.cra_driver_name	= "ecb-blowfish-asm",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= BF_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct bf_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= BF_MIN_KEY_SIZE,
		.max_keysize		= BF_MAX_KEY_SIZE,
		.setkey			= blowfish_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(blowfish)",
		.base.cra_driver_name	= "cbc-blowfish-asm",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= BF_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct bf_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= BF_MIN_KEY_SIZE,
		.max_keysize		= BF_MAX_KEY_SIZE,
		.ivsize			= BF_BLOCK_SIZE,
		.setkey			= blowfish_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	},
};

static bool is_blacklisted_cpu(void)
{
	if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
		return false;

	if (boot_cpu_data.x86 == 0x0f) {
		/*
		 * On Pentium 4, blowfish-x86_64 is slower than the generic C
		 * implementation because it uses 64-bit rotates, which are
		 * really slow on P4. Therefore blacklist P4s.
		 */
		return true;
	}

	return false;
}

static int force;
module_param(force, int, 0);
MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");

static int __init init(void)
{
	int err;

	if (!force && is_blacklisted_cpu()) {
		pr_info("blowfish-x86_64: performance on this CPU would be suboptimal: disabling blowfish-x86_64.\n");
		return -ENODEV;
	}

	err = crypto_register_alg(&bf_cipher_alg);
	if (err)
		return err;

	err = crypto_register_skciphers(bf_skcipher_algs,
					ARRAY_SIZE(bf_skcipher_algs));
	if (err)
		crypto_unregister_alg(&bf_cipher_alg);

	return err;
}

static void __exit fini(void)
{
	crypto_unregister_alg(&bf_cipher_alg);
	crypto_unregister_skciphers(bf_skcipher_algs,
				    ARRAY_SIZE(bf_skcipher_algs));
}

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Blowfish Cipher Algorithm, asm optimized");
MODULE_ALIAS_CRYPTO("blowfish");
MODULE_ALIAS_CRYPTO("blowfish-asm");
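
/*
 * Sketch of how an in-kernel user would reach this driver (assumes the
 * standard skcipher API; error handling and request setup trimmed):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cbc(blowfish)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	...
 *	crypto_free_skcipher(tfm);
 *
 * The crypto core picks the highest-priority implementation of
 * "cbc(blowfish)", which is cbc-blowfish-asm while this module is loaded;
 * /proc/crypto shows which driver actually backs each algorithm name.
 */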