/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};

/* Handle the final, possibly partial block: encrypt one counter block into
 * an aligned stack buffer and XOR only the remaining bytes into dst. */
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

/* Process full blocks when source and destination buffers differ; the
 * keystream is generated directly into the destination buffer. */
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

/* Process full blocks in place; the keystream goes through an aligned stack
 * buffer since the same memory holds both input and output. */
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

/* Walk the request and transform it; in CTR mode encryption and decryption
 * are the same operation. */
static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}
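/*
 * A note on counter arithmetic: crypto_inc() advances the counter block as
 * a single big-endian integer and has a word-at-a-time fast path, which is
 * why crypto_ctr_create() below requires the block size to be a multiple
 * of 4. A minimal byte-wise sketch with the same semantics (the helper name
 * is ours, purely illustrative; it is not the kernel's implementation):
 *
 *	static void ctr_inc_sketch(u8 *counter, unsigned int size)
 *	{
 *		for (; size; size--)
 *			if (++counter[size - 1])
 *				break;
 *	}
 *
 * Incrementing the last byte and rippling the carry toward byte 0 on
 * wraparound gives the big-endian increment used throughout this file.
 */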
static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_free_inst;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_free_inst;

	/* CTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_ctr_crypt;
	inst->alg.decrypt = crypto_ctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto out_free_inst;
	goto out_put_alg;

out_free_inst:
	inst->free(inst);
out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;
	int err;

	/* the nonce is stored in the last 4 bytes of the key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_skcipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);

	return err;
}

static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}
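/*
 * For clarity, the 16-byte counter block assembled in crypto_rfc3686_crypt()
 * follows the RFC 3686 layout:
 *
 *	bytes  0..3	nonce (taken from the last 4 key bytes at setkey time)
 *	bytes  4..11	per-request IV supplied by the caller
 *	bytes 12..15	block counter, big-endian, initialized to 1
 *
 * Only the 8-byte IV is visible to users of "rfc3686(...)"; the nonce is
 * fixed per key, and the counter is advanced internally by the underlying
 * CTR template.
 */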
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}
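/*
 * The reqsize computed above reserves room for one aligned
 * crypto_rfc3686_req_ctx behind the caller's request. Roughly (a sketch of
 * the layout; the actual padding depends on the tfm's alignmask):
 *
 *	struct skcipher_request (parent)
 *	[padding up to the alignmask boundary]
 *	struct crypto_rfc3686_req_ctx {
 *		u8 iv[CTR_RFC3686_BLOCK_SIZE];
 *		struct skcipher_request subreq;	(+ child reqsize)
 *	}
 *
 * crypto_rfc3686_crypt() recovers the aligned rctx pointer with PTR_ALIGN().
 */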
static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	const char *cipher_name;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	mask = crypto_requires_sync(algt->type, algt->mask) |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	spawn = skcipher_instance_ctx(inst);

	crypto_set_skcipher_spawn(spawn, skcipher_crypto_instance(inst));
	err = crypto_grab_skcipher(spawn, cipher_name, 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_drop_spawn;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_drop_spawn;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_drop_spawn;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_skcipher(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_ctr_tmpls[] = {
	{
		.name = "ctr",
		.create = crypto_ctr_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc3686",
		.create = crypto_rfc3686_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ctr_module_init(void)
{
	return crypto_register_templates(crypto_ctr_tmpls,
					 ARRAY_SIZE(crypto_ctr_tmpls));
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_templates(crypto_ctr_tmpls,
				    ARRAY_SIZE(crypto_ctr_tmpls));
}

module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");
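/*
 * Usage sketch (not part of this module): a kernel caller might drive the
 * "rfc3686" template registered above roughly as follows. Error handling,
 * completion setup, and the key/buffer contents are omitted; the 20-byte
 * key length assumes AES-128 (16 bytes) plus the 4-byte nonce.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	u8 iv[CTR_RFC3686_IV_SIZE];
 *
 *	tfm = crypto_alloc_skcipher("rfc3686(ctr(aes))", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, 20);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, buflen);
 *	skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *	crypto_skcipher_encrypt(req);
 */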