/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/*
 * Per-instance parameters: the counter block is laid out as
 * nonce || IV || counter, zero-padded to the cipher block size.
 */
struct ctr_instance_ctx {
	struct crypto_spawn alg;
	unsigned int noncesize;
	unsigned int ivsize;
	unsigned int countersize;
};

struct crypto_ctr_ctx {
	struct crypto_cipher *child;
	u8 *nonce;
};

static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	struct ctr_instance_ctx *ictx =
		crypto_instance_ctx(crypto_tfm_alg_instance(parent));
	unsigned int noncelen = ictx->noncesize;
	int err = 0;

	/* the nonce is stored in the trailing bytes of the key */
	if (keylen < noncelen)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - noncelen), noncelen);

	keylen -= noncelen;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
				     CRYPTO_TFM_RES_MASK);

	return err;
}

/*
 * Handle the final partial block: generate one full block of keystream
 * and use only the first walk->nbytes bytes of it.
 */
static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
				   struct crypto_cipher *tfm, u8 *ctrblk,
				   unsigned int countersize)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *keystream = ctrblk + bsize;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
}

static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm, u8 *ctrblk,
				    unsigned int countersize)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream directly in dst, then xor in src */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk + bsize - countersize, countersize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm, u8 *ctrblk,
				    unsigned int countersize)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *keystream = ctrblk + bsize;

	do {
		/* create keystream in the scratch buffer, xor into src */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk + bsize - countersize, countersize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	unsigned int bsize = crypto_cipher_blocksize(child);
	struct ctr_instance_ctx *ictx =
		crypto_instance_ctx(crypto_tfm_alg_instance(&tfm->base));
	unsigned long alignmask = crypto_cipher_alignmask(child) |
				  (__alignof__(u32) - 1);
	u8 cblk[bsize * 2 + alignmask];
	u8 *counterblk = (u8 *)ALIGN((unsigned long)cblk, alignmask + 1);
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	/* set up counter block: nonce || IV || counter */
	memset(counterblk, 0, bsize);
	memcpy(counterblk, ctx->nonce, ictx->noncesize);
	memcpy(counterblk + ictx->noncesize, walk.iv, ictx->ivsize);

	/* the counter portion was zeroed above, so this starts it at 1 */
	crypto_inc(counterblk + bsize - ictx->countersize, ictx->countersize);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, child,
							  counterblk,
							  ictx->countersize);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, child,
							  counterblk,
							  ictx->countersize);

		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, child, counterblk,
				       ictx->countersize);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}

static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct ctr_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	ctx->nonce = kzalloc(ictx->noncesize, GFP_KERNEL);
	if (!ctx->nonce)
		return -ENOMEM;

	cipher = crypto_spawn_cipher(&ictx->alg);
	if (IS_ERR(cipher)) {
		/* cra_exit is not called when cra_init fails, so free
		 * the nonce here to avoid leaking it */
		kfree(ctx->nonce);
		return PTR_ERR(cipher);
	}

	ctx->child = cipher;

	return 0;
}

static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	kfree(ctx->nonce);
	crypto_free_cipher(ctx->child);
}

static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	struct ctr_instance_ctx *ictx;
	unsigned int noncesize;
	unsigned int ivsize;
	unsigned int countersize;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
			      CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_PTR(PTR_ERR(alg));

	err = crypto_attr_u32(tb[2], &noncesize);
	if (err)
		goto out_put_alg;

	err = crypto_attr_u32(tb[3], &ivsize);
	if (err)
		goto out_put_alg;

	err = crypto_attr_u32(tb[4], &countersize);
	if (err)
		goto out_put_alg;

	/* Verify the nonce, IV and counter sizes: nonce + IV must not
	 * exceed one cipher block, nonce + IV + counter must cover at
	 * least one block, and the counter must be >= 4 bytes.
	 */
	err = -EINVAL;
	if (((noncesize + ivsize + countersize) < alg->cra_blocksize) ||
	    ((noncesize + ivsize) > alg->cra_blocksize) ||
	    (countersize > alg->cra_blocksize) || (countersize < 4))
		goto out_put_alg;

	/* The counter's offset within the block must be a multiple of 4;
	 * otherwise crypto_inc would operate on misaligned data.
	 */
	if ((alg->cra_blocksize - countersize) % 4)
		goto out_put_alg;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME,
		     "ctr(%s,%u,%u,%u)", alg->cra_name, noncesize,
		     ivsize, countersize) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ctr(%s,%u,%u,%u)", alg->cra_driver_name, noncesize,
		     ivsize, countersize) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	ictx = crypto_instance_ctx(inst);
	ictx->noncesize = noncesize;
	ictx->ivsize = ivsize;
	ictx->countersize = countersize;

	err = crypto_init_spawn(&ictx->alg, alg, inst,
				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
	if (err)
		goto err_free_inst;

	err = 0;
	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	/* CTR turns the block cipher into a stream cipher */
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1);
	inst->alg.cra_type = &crypto_blkcipher_type;

	inst->alg.cra_blkcipher.ivsize = ivsize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize +
					      noncesize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize +
					      noncesize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);

	inst->alg.cra_init = crypto_ctr_init_tfm;
	inst->alg.cra_exit = crypto_ctr_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
	/* encryption and decryption are identical in CTR mode */
	inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;

err_free_inst:
	if (err)
		kfree(inst);

out_put_alg:
	crypto_mod_put(alg);

	if (err)
		inst = ERR_PTR(err);

	return inst;
}

static void crypto_ctr_free(struct crypto_instance *inst)
{
	struct ctr_instance_ctx *ictx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ictx->alg);
	kfree(inst);
}

static struct crypto_template crypto_ctr_tmpl = {
	.name = "ctr",
	.alloc = crypto_ctr_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};

static int __init crypto_ctr_module_init(void)
{
	return crypto_register_template(&crypto_ctr_tmpl);
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_template(&crypto_ctr_tmpl);
}

module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR Counter block mode");
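
/*
 * Illustrative usage sketch, not part of this module. The template is
 * instantiated with explicit nonce/IV/counter widths; for example,
 * "ctr(aes,4,8,4)" splits the 16-byte AES block into a 4-byte nonce,
 * an 8-byte per-message IV and a 4-byte big-endian counter (the
 * RFC 3686 layout). Since crypto_ctr_setkey() takes the nonce from the
 * trailing bytes of the key, the key buffer below is assumed to hold
 * 16 bytes of AES-128 key material followed by 4 nonce bytes. The
 * function name and parameters are hypothetical.
 */
#if 0	/* example only */
static int ctr_aes_example(struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes, const u8 *key20, const u8 *iv8)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	int err;

	tfm = crypto_alloc_blkcipher("ctr(aes,4,8,4)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* 16-byte AES key with the 4-byte nonce appended */
	err = crypto_blkcipher_setkey(tfm, key20, 20);
	if (!err) {
		desc.tfm = tfm;
		desc.flags = 0;
		/* the 8-byte IV fills the middle of the counter block */
		crypto_blkcipher_set_iv(tfm, iv8, 8);
		err = crypto_blkcipher_encrypt(&desc, dst, src, nbytes);
	}

	crypto_free_blkcipher(tfm);
	return err;
}
#endif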