/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"

struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn mac;
};

struct crypto_ccm_ctx {
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
};

struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];
};

struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

struct crypto_ccm_req_priv_ctx {
	u8 odata[16];
	u8 idata[16];
	u8 auth_tag[16];
	u32 flags;
	struct scatterlist src[3];
	struct scatterlist dst[3];
	union {
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	};
};

struct cbcmac_tfm_ctx {
	struct crypto_cipher *child;
};

struct cbcmac_desc_ctx {
	unsigned int len;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}

static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_skcipher *ctr = ctx->ctr;
	struct crypto_ahash *mac = ctx->mac;
	int err = 0;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
				    CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(mac, key, keylen);
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
				    CRYPTO_TFM_RES_MASK);

out:
	return err;
}

static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int format_input(u8 *info, struct aead_request *req,
			unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int lp = req->iv[0];
	unsigned int l = lp + 1;
	unsigned int m;

	m = crypto_aead_authsize(aead);

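	/* Build B_0 from the nonce block: iv[0] already carries L' = L - 1
	 * in the low three flag bits; the tag-length field (m - 2) / 2 and
	 * the Adata bit are OR-ed in below (RFC 3610, NIST SP 800-38C).
	 */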
	memcpy(info, req->iv, 16);

	/* format control info per RFC 3610 and
	 * NIST Special Publication 800-38C
	 */
	*info |= (8 * ((m - 2) / 2));
	if (req->assoclen)
		*info |= 64;

	return set_msg_len(info + 16 - l, cryptlen, l);
}

static int format_adata(u8 *adata, unsigned int a)
{
	int len = 0;

	/* add control info for associated data
	 * RFC 3610 and NIST Special Publication 800-38C
	 */
	if (a < 65280) {
		*(__be16 *)adata = cpu_to_be16(a);
		len = 2;
	} else {
		*(__be16 *)adata = cpu_to_be16(0xfffe);
		*(__be32 *)&adata[2] = cpu_to_be32(a);
		len = 6;
	}

	return len;
}

static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct ahash_request *ahreq = &pctx->ahreq;
	unsigned int assoclen = req->assoclen;
	struct scatterlist sg[3];
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int ilen, err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	sg_init_table(sg, 3);
	sg_set_buf(&sg[0], odata, 16);

	/* format associated data and compute into mac */
	if (assoclen) {
		ilen = format_adata(idata, assoclen);
		sg_set_buf(&sg[1], idata, ilen);
		sg_chain(sg, 3, req->src);
	} else {
		ilen = 0;
		sg_chain(sg, 2, req->src);
	}

	ahash_request_set_tfm(ahreq, ctx->mac);
	ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
	ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
	err = crypto_ahash_init(ahreq);
	if (err)
		goto out;
	err = crypto_ahash_update(ahreq);
	if (err)
		goto out;

	/* we need to pad the MAC input to a round multiple of the block size */
	ilen = 16 - (assoclen + ilen) % 16;
	if (ilen < 16) {
		memset(idata, 0, ilen);
		sg_init_table(sg, 2);
		sg_set_buf(&sg[0], idata, ilen);
		if (plain)
			sg_chain(sg, 2, plain);
		plain = sg;
		cryptlen += ilen;
	}

	ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
	err = crypto_ahash_finup(ahreq);
out:
	return err;
}

static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	u8 *odata = pctx->odata;

	if (!err)
		scatterwalk_map_and_copy(odata, req->dst,
					 req->assoclen + req->cryptlen,
					 crypto_aead_authsize(aead), 1);
	aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (1 > iv[0] || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
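	/* iv[0] holds L' = L - 1, so this clears exactly the trailing L-byte
	 * counter field while leaving the flags octet and nonce intact.
	 */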
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}

static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_init_crypt(req, odata);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
	if (err)
		return err;

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_encrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
				 crypto_aead_authsize(aead), 1);
	return err;
}

static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
				    int err)
{
	struct aead_request *req = areq->data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct scatterlist *dst;

	pctx->flags = 0;

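	/* pctx->src/pctx->dst begin with the 16-byte block holding the auth
	 * tag; skip it to reach the freshly decrypted payload for the MAC
	 * recomputation below.
	 */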
	dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

	if (!err) {
		err = crypto_ccm_auth(req, dst, cryptlen);
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}

static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = pctx->idata;
	int err;

	cryptlen -= authsize;

	err = crypto_ccm_init_crypt(req, authtag);
	if (err)
		return err;

	scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
				 authsize, 0);

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	memcpy(iv, req->iv, 16);

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_decrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_decrypt(skreq);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
	if (err)
		return err;

	/* verify */
	if (crypto_memneq(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}

static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
	unsigned long align;
	int err;

	mac = crypto_spawn_ahash(&ictx->mac);
	if (IS_ERR(mac))
		return PTR_ERR(mac);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_mac;

	ctx->mac = mac;
	ctx->ctr = ctr;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		align + sizeof(struct crypto_ccm_req_priv_ctx) +
		max(crypto_ahash_reqsize(mac), crypto_skcipher_reqsize(ctr)));

	return 0;

err_free_mac:
	crypto_free_ahash(mac);
	return err;
}

static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->mac);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_ccm_free(struct aead_instance *inst)
{
	struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_ahash(&ctx->mac);
	crypto_drop_skcipher(&ctx->ctr);
	kfree(inst);
}

static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *full_name,
				    const char *ctr_name,
				    const char *mac_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *mac_alg;
	struct hash_alg_common *mac;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
				  CRYPTO_ALG_TYPE_HASH,
				  CRYPTO_ALG_TYPE_AHASH_MASK |
				  CRYPTO_ALG_ASYNC);
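	/* The lookup above requires a synchronous hash (CRYPTO_ALG_ASYNC is
	 * set in the mask but not in the type); the 16-byte digest needed
	 * for a CCM MAC such as "cbcmac(aes)" is checked just below.
	 */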
	if (IS_ERR(mac_alg))
		return PTR_ERR(mac_alg);

	mac = __crypto_hash_alg_common(mac_alg);
	err = -EINVAL;
	if (mac->digestsize != 16)
		goto out_put_mac;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_mac;

	ictx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ictx->mac, mac,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_mac;

	ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

	/* Not a stream cipher? */
	err = -EINVAL;
	if (ctr->base.cra_blocksize != 1)
		goto err_drop_ctr;

	/* We want the real thing! */
	if (crypto_skcipher_alg_ivsize(ctr) != 16)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
		     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (mac->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = mac->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.ivsize = 16;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;

	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_ctr;

out_put_mac:
	crypto_mod_put(mac_alg);
	return err;

err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_mac:
	crypto_drop_ahash(&ictx->mac);
err_free_inst:
	kfree(inst);
	goto out_put_mac;
}

static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char mac_name[CRYPTO_MAX_ALG_NAME];
	char full_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
					mac_name);
}

static struct crypto_template crypto_ccm_tmpl = {
	.name = "ccm",
	.create = crypto_ccm_create,
	.module = THIS_MODULE,
};

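/*
 * "ccm_base" names the CTR and MAC transforms explicitly instead of deriving
 * them from a single cipher, so an instance such as
 * "ccm_base(ctr(aes),cbcmac(aes))" should end up equivalent to "ccm(aes)".
 */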
static int crypto_ccm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *cipher_name;
	char full_name[CRYPTO_MAX_ALG_NAME];

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	cipher_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
		     ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
					cipher_name);
}

static struct crypto_template crypto_ccm_base_tmpl = {
	.name = "ccm_base",
	.create = crypto_ccm_base_create,
	.module = THIS_MODULE,
};

static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 3)
		return -EINVAL;

	keylen -= 3;
	memcpy(ctx->nonce, key + keylen, 3);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}

static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}

static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;

	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
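	/* req->assoclen covers the associated data plus the trailing 8-byte
	 * IV; only assoclen - 8 bytes are passed to the inner CCM transform
	 * as AAD, the IV itself having been folded into the nonce above.
	 */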
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}

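/* For RFC 4309 ESP, the associated data is SPI + sequence number (32- or
 * 64-bit) followed by the 8-byte IV, hence the 16/20 byte checks below.
 */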
static int crypto_rfc4309_encrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4309_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 32);

	return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4309_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}

static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = 8;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = 16;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;

	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;

	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_rfc4309_tmpl = {
	.name = "rfc4309",
	.create = crypto_rfc4309_create,
	.module = THIS_MODULE,
};

static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
				       const u8 *inkey, unsigned int keylen)
{
	struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);

	return crypto_cipher_setkey(ctx->child, inkey, keylen);
}

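/*
 * The running CBC-MAC state is one cipher block stored at the tail of the
 * shash descriptor context: cbcmac_create() sizes descsize as the aligned
 * struct cbcmac_desc_ctx plus one block, and dg below points at that block.
 * update() XORs input into dg and encrypts it whenever a full block has been
 * absorbed; final() encrypts any remaining partial block and copies the
 * result out.
 */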
static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_digestsize(pdesc->tfm);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;

	ctx->len = 0;
	memset(dg, 0, bs);

	return 0;
}

static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				       unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	while (len > 0) {
		unsigned int l = min(len, bs - ctx->len);

		crypto_xor(dg + ctx->len, p, l);
		ctx->len += l;
		len -= l;
		p += l;

		if (ctx->len == bs) {
			crypto_cipher_encrypt_one(tfm, dg, dg);
			ctx->len = 0;
		}
	}

	return 0;
}

static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	if (ctx->len)
		crypto_cipher_encrypt_one(tfm, dg, dg);

	memcpy(out, dg, bs);
	return 0;
}

static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
	if (err)
		return err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = shash_alloc_instance("cbcmac", alg);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	err = crypto_init_spawn(shash_instance_ctx(inst), alg,
				shash_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = 1;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx),
				   alg->cra_alignmask + 1) +
			     alg->cra_blocksize;

	inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
	inst->alg.base.cra_init = cbcmac_init_tfm;
	inst->alg.base.cra_exit = cbcmac_exit_tfm;

	inst->alg.init = crypto_cbcmac_digest_init;
	inst->alg.update = crypto_cbcmac_digest_update;
	inst->alg.final = crypto_cbcmac_digest_final;
	inst->alg.setkey = crypto_cbcmac_digest_setkey;

	err = shash_register_instance(tmpl, inst);

out_free_inst:
	if (err)
		shash_free_instance(shash_crypto_instance(inst));

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static struct crypto_template crypto_cbcmac_tmpl = {
	.name = "cbcmac",
	.create = cbcmac_create,
	.free = shash_free_instance,
	.module = THIS_MODULE,
};

static int __init crypto_ccm_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_cbcmac_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_ccm_base_tmpl);
	if (err)
		goto out_undo_cbcmac;

	err = crypto_register_template(&crypto_ccm_tmpl);
	if (err)
		goto out_undo_base;

	err = crypto_register_template(&crypto_rfc4309_tmpl);
	if (err)
		goto out_undo_ccm;

out:
	return err;

out_undo_ccm:
	crypto_unregister_template(&crypto_ccm_tmpl);
out_undo_base:
	crypto_unregister_template(&crypto_ccm_base_tmpl);
out_undo_cbcmac:
	crypto_unregister_template(&crypto_cbcmac_tmpl);
	goto out;
}

static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc4309_tmpl);
	crypto_unregister_template(&crypto_ccm_tmpl);
	crypto_unregister_template(&crypto_ccm_base_tmpl);
	crypto_unregister_template(&crypto_cbcmac_tmpl);
}

module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");