// SPDX-License-Identifier: GPL-2.0-only
/*
 * GCM: Galois/Counter Mode.
 *
 * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
 */

#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/hash.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <crypto/gcm.h>
#include <crypto/hash.h>
#include "internal.h"
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct gcm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn ghash;
};

struct crypto_gcm_ctx {
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
};

struct crypto_rfc4106_ctx {
	struct crypto_aead *child;
	u8 nonce[4];
};

struct crypto_rfc4106_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

struct crypto_rfc4543_instance_ctx {
	struct crypto_aead_spawn aead;
};

struct crypto_rfc4543_ctx {
	struct crypto_aead *child;
	struct crypto_sync_skcipher *null;
	u8 nonce[4];
};

struct crypto_rfc4543_req_ctx {
	struct aead_request subreq;
};

struct crypto_gcm_ghash_ctx {
	unsigned int cryptlen;
	struct scatterlist *src;
	int (*complete)(struct aead_request *req, u32 flags);
};

struct crypto_gcm_req_priv_ctx {
	u8 iv[16];
	u8 auth_tag[16];
	u8 iauth_tag[16];
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct scatterlist sg;
	struct crypto_gcm_ghash_ctx ghash_ctx;
	union {
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	} u;
};

static struct {
	u8 buf[16];
	struct scatterlist sg;
} *gcm_zeroes;

static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc);

static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ahash *ghash = ctx->ghash;
	struct crypto_skcipher *ctr = ctx->ctr;
	struct {
		be128 hash;
		u8 iv[16];

		struct crypto_wait wait;

		struct scatterlist sg[1];
		struct skcipher_request req;
	} *data;
	int err;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
				    CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	data = kzalloc(sizeof(*data) + crypto_skcipher_reqsize(ctr),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	crypto_init_wait(&data->wait);
	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
	skcipher_request_set_tfm(&data->req, ctr);
	skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
						  CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done,
				      &data->wait);
	skcipher_request_set_crypt(&data->req, data->sg, data->sg,
				   sizeof(data->hash), data->iv);

	err = crypto_wait_req(crypto_skcipher_encrypt(&data->req),
			      &data->wait);

	if (err)
		goto out;
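
	/*
	 * CTR with the all-zero IV turns the zeroed data->hash block into
	 * E_K(0^128), i.e. the hash subkey H from the GCM specification.
	 * It is now installed as the key of the ghash transform.
	 */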

	crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
				      CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(ghash) &
				    CRYPTO_TFM_RES_MASK);

out:
	kzfree(data);
	return err;
}

static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

static void crypto_gcm_init_common(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	__be32 counter = cpu_to_be32(1);
	struct scatterlist *sg;

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	memcpy(pctx->iv, req->iv, GCM_AES_IV_SIZE);
	memcpy(pctx->iv + GCM_AES_IV_SIZE, &counter, 4);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}
}

static void crypto_gcm_init_crypt(struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct scatterlist *dst;

	dst = req->src == req->dst ? pctx->src : pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_crypt(skreq, pctx->src, dst,
				   cryptlen + sizeof(pctx->auth_tag),
				   pctx->iv);
}

static inline unsigned int gcm_remain(unsigned int len)
{
	len &= 0xfU;
	return len ? 16 - len : 0;
}
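
/*
 * GHASH is driven as a chain of ahash steps: init, associated data,
 * zero padding, ciphertext, zero padding, then a final block holding
 * the bit lengths.  Each step is started with a *_done() completion
 * callback attached; if it completes synchronously (returns zero), the
 * matching *_continue() helper is invoked inline via the "x() ?: y()"
 * idiom, otherwise -EINPROGRESS propagates to the caller and the chain
 * resumes from the completion callback instead.
 */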

static void gcm_hash_len_done(struct crypto_async_request *areq, int err);

static int gcm_hash_update(struct aead_request *req,
			   crypto_completion_t compl,
			   struct scatterlist *src,
			   unsigned int len, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;

	ahash_request_set_callback(ahreq, flags, compl, req);
	ahash_request_set_crypt(ahreq, src, NULL, len);

	return crypto_ahash_update(ahreq);
}

static int gcm_hash_remain(struct aead_request *req,
			   unsigned int remain,
			   crypto_completion_t compl, u32 flags)
{
	return gcm_hash_update(req, compl, &gcm_zeroes->sg, remain, flags);
}

static int gcm_hash_len(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	be128 lengths;

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(gctx->cryptlen * 8);
	memcpy(pctx->iauth_tag, &lengths, 16);
	sg_init_one(&pctx->sg, pctx->iauth_tag, 16);
	ahash_request_set_callback(ahreq, flags, gcm_hash_len_done, req);
	ahash_request_set_crypt(ahreq, &pctx->sg,
				pctx->iauth_tag, sizeof(lengths));

	return crypto_ahash_finup(ahreq);
}

static int gcm_hash_len_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	return gctx->complete(req, flags);
}

static void gcm_hash_len_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_len_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_crypt_remain_continue(struct aead_request *req, u32 flags)
{
	return gcm_hash_len(req, flags) ?:
	       gcm_hash_len_continue(req, flags);
}

static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_crypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int remain;

	remain = gcm_remain(gctx->cryptlen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_crypt_remain_done, flags) ?:
		       gcm_hash_crypt_remain_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}

static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}
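
/*
 * Stage names describe the data that was just hashed: *_assoc_* stages
 * run after the associated data, *_crypt_* stages after the ciphertext.
 * The *_remain_* stages feed zeroes from gcm_zeroes to pad the
 * preceding data up to the 16-byte GHASH block size.
 */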

static int gcm_hash_assoc_remain_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	if (gctx->cryptlen)
		return gcm_hash_update(req, gcm_hash_crypt_done,
				       gctx->src, gctx->cryptlen, flags) ?:
		       gcm_hash_crypt_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}

static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_assoc_continue(struct aead_request *req, u32 flags)
{
	unsigned int remain;

	remain = gcm_remain(req->assoclen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_assoc_remain_done, flags) ?:
		       gcm_hash_assoc_remain_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}

static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash_init_continue(struct aead_request *req, u32 flags)
{
	if (req->assoclen)
		return gcm_hash_update(req, gcm_hash_assoc_done,
				       req->src, req->assoclen, flags) ?:
		       gcm_hash_assoc_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}

static void gcm_hash_init_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_init_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int gcm_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));

	ahash_request_set_tfm(ahreq, ctx->ghash);

	ahash_request_set_callback(ahreq, flags, gcm_hash_init_done, req);
	return crypto_ahash_init(ahreq) ?:
	       gcm_hash_init_continue(req, flags);
}

static int gcm_enc_copy_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;

	crypto_xor(auth_tag, pctx->iauth_tag, 16);
	scatterwalk_map_and_copy(auth_tag, req->dst,
				 req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}
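
/*
 * crypto_gcm_init_common() prepends the zero-filled auth_tag buffer to
 * the payload scatterlist, so the CTR pass encrypts it first and leaves
 * E_K(Y0) behind in auth_tag.  gcm_enc_copy_hash() above XORs the GHASH
 * digest (iauth_tag) into it to produce the final authentication tag.
 */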

static int gcm_encrypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	gctx->src = sg_next(req->src == req->dst ? pctx->src : pctx->dst);
	gctx->cryptlen = req->cryptlen;
	gctx->complete = gcm_enc_copy_hash;

	return gcm_hash(req, flags);
}

static void gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_encrypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;

out:
	aead_request_complete(req, err);
}

static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	u32 flags = aead_request_flags(req);

	crypto_gcm_init_common(req);
	crypto_gcm_init_crypt(req, req->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_encrypt_done, req);

	return crypto_skcipher_encrypt(skreq) ?:
	       gcm_encrypt_continue(req, flags);
}

static int crypto_gcm_verify(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	crypto_xor(auth_tag, iauth_tag, 16);
	scatterwalk_map_and_copy(iauth_tag, req->src,
				 req->assoclen + cryptlen, authsize, 0);
	return crypto_memneq(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}

static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_verify(req);

	aead_request_complete(req, err);
}

static int gcm_dec_hash_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	crypto_gcm_init_crypt(req, gctx->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_decrypt_done, req);
	return crypto_skcipher_decrypt(skreq) ?: crypto_gcm_verify(req);
}

static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u32 flags = aead_request_flags(req);

	cryptlen -= authsize;

	crypto_gcm_init_common(req);

	gctx->src = sg_next(pctx->src);
	gctx->cryptlen = cryptlen;
	gctx->complete = gcm_dec_hash_continue;

	return gcm_hash(req, flags);
}
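
/*
 * Decryption runs GHASH over the ciphertext first and performs the CTR
 * pass afterwards; crypto_gcm_verify() then compares the recomputed tag
 * against the received one with crypto_memneq() so that the comparison
 * does not leak timing information.
 */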

static int crypto_gcm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct gcm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
	unsigned long align;
	int err;

	ghash = crypto_spawn_ahash(&ictx->ghash);
	if (IS_ERR(ghash))
		return PTR_ERR(ghash);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_hash;

	ctx->ctr = ctr;
	ctx->ghash = ghash;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(tfm,
		align + offsetof(struct crypto_gcm_req_priv_ctx, u) +
		max(sizeof(struct skcipher_request) +
		    crypto_skcipher_reqsize(ctr),
		    sizeof(struct ahash_request) +
		    crypto_ahash_reqsize(ghash)));

	return 0;

err_free_hash:
	crypto_free_ahash(ghash);
	return err;
}

static void crypto_gcm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->ghash);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_gcm_free(struct aead_instance *inst)
{
	struct gcm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->ctr);
	crypto_drop_ahash(&ctx->ghash);
	kfree(inst);
}

static int crypto_gcm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *ctr_name,
				    const char *ghash_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *ghash_alg;
	struct hash_alg_common *ghash;
	struct gcm_instance_ctx *ctx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
				    CRYPTO_ALG_TYPE_HASH,
				    CRYPTO_ALG_TYPE_AHASH_MASK |
				    crypto_requires_sync(algt->type,
							 algt->mask));
	if (IS_ERR(ghash_alg))
		return PTR_ERR(ghash_alg);

	ghash = __crypto_hash_alg_common(ghash_alg);

	err = -ENOMEM;
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		goto out_put_ghash;

	ctx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ctx->ghash, ghash,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	err = -EINVAL;
	if (strcmp(ghash->base.cra_name, "ghash") != 0 ||
	    ghash->digestsize != 16)
		goto err_drop_ghash;

	crypto_set_skcipher_spawn(&ctx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_ghash;

	ctr = crypto_spawn_skcipher_alg(&ctx->ctr);

	/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
	err = -EINVAL;
	if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
	    crypto_skcipher_alg_ivsize(ctr) != 16 ||
	    ctr->base.cra_blocksize != 1)
		goto out_put_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "gcm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm_base(%s,%s)", ctr->base.cra_driver_name,
		     ghash_alg->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	inst->alg.base.cra_flags = (ghash->base.cra_flags |
				    ctr->base.cra_flags) & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (ghash->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = ghash->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.ivsize = GCM_AES_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.init = crypto_gcm_init_tfm;
	inst->alg.exit = crypto_gcm_exit_tfm;
	inst->alg.setkey = crypto_gcm_setkey;
	inst->alg.setauthsize = crypto_gcm_setauthsize;
	inst->alg.encrypt = crypto_gcm_encrypt;
	inst->alg.decrypt = crypto_gcm_decrypt;

	inst->free = crypto_gcm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_put_ctr;

out_put_ghash:
	crypto_mod_put(ghash_alg);
	return err;

out_put_ctr:
	crypto_drop_skcipher(&ctx->ctr);
err_drop_ghash:
	crypto_drop_ahash(&ctx->ghash);
err_free_inst:
	kfree(inst);
	goto out_put_ghash;
}

static int crypto_gcm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_gcm_create_common(tmpl, tb, ctr_name, "ghash");
}

static int crypto_gcm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *ghash_name;

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	ghash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(ghash_name))
		return PTR_ERR(ghash_name);

	return crypto_gcm_create_common(tmpl, tb, ctr_name, ghash_name);
}

static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}
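
/*
 * RFC 4106 key material carries a trailing four-byte salt, which the
 * setkey above saves in ctx->nonce.  The salt plus the eight-byte
 * explicit per-request IV forms the 12-byte GCM nonce (salt || IV).
 */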

static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	return crypto_aead_setauthsize(ctx->child, authsize);
}

static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
{
	struct crypto_rfc4106_req_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	scatterwalk_map_and_copy(iv + GCM_AES_IV_SIZE, req->src, 0,
				 req->assoclen - 8, 0);

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}

static int crypto_rfc4106_encrypt(struct aead_request *req)
{
	int err;

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	req = crypto_rfc4106_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4106_decrypt(struct aead_request *req)
{
	int err;

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	req = crypto_rfc4106_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4106_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4106_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 24);

	return 0;
}

static void crypto_rfc4106_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4106_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}
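
/*
 * The rfc4106 template wraps an existing GCM implementation named by
 * the template parameter rather than assembling one from ctr+ghash;
 * the checks below reject anything that does not have GCM's geometry.
 */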

static int crypto_rfc4106_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);

	inst->alg.ivsize = GCM_RFC4106_IV_SIZE;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4106_init_tfm;
	inst->alg.exit = crypto_rfc4106_exit_tfm;

	inst->alg.setkey = crypto_rfc4106_setkey;
	inst->alg.setauthsize = crypto_rfc4106_setauthsize;
	inst->alg.encrypt = crypto_rfc4106_encrypt;
	inst->alg.decrypt = crypto_rfc4106_decrypt;

	inst->free = crypto_rfc4106_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}

static int crypto_rfc4543_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);

	if (authsize != 16)
		return -EINVAL;

	return crypto_aead_setauthsize(ctx->child, authsize);
}
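
/*
 * RFC 4543 is GCM used purely as a MAC (GMAC): the whole frame, payload
 * included, is passed to the child GCM transform as associated data, so
 * it is authenticated but transmitted in the clear; only the 16-byte
 * tag is generated or checked.
 */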

static int crypto_rfc4543_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_rfc4543_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	unsigned int authsize = crypto_aead_authsize(aead);
	u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),
			   crypto_aead_alignmask(ctx->child) + 1);
	int err;

	if (req->src != req->dst) {
		err = crypto_rfc4543_copy_src_to_dst(req, enc);
		if (err)
			return err;
	}

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, ctx->child);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       enc ? 0 : authsize, iv);
	aead_request_set_ad(subreq, req->assoclen + req->cryptlen -
				    subreq->cryptlen);

	return enc ? crypto_aead_encrypt(subreq) : crypto_aead_decrypt(subreq);
}

static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int nbytes = req->assoclen + req->cryptlen -
			      (enc ? 0 : authsize);
	SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->null);

	skcipher_request_set_sync_tfm(nreq, ctx->null);
	skcipher_request_set_callback(nreq, req->base.flags, NULL, NULL);
	skcipher_request_set_crypt(nreq, req->src, req->dst, nbytes, NULL);

	return crypto_skcipher_encrypt(nreq);
}

static int crypto_rfc4543_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       crypto_rfc4543_crypt(req, true);
}

static int crypto_rfc4543_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       crypto_rfc4543_crypt(req, false);
}

static int crypto_rfc4543_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_rfc4543_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead;
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	struct crypto_sync_skcipher *null;
	unsigned long align;
	int err = 0;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	null = crypto_get_default_null_skcipher();
	err = PTR_ERR(null);
	if (IS_ERR(null))
		goto err_free_aead;

	ctx->child = aead;
	ctx->null = null;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4543_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + GCM_AES_IV_SIZE);

	return 0;

err_free_aead:
	crypto_free_aead(aead);
	return err;
}

static void crypto_rfc4543_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
	crypto_put_default_null_skcipher();
}

static void crypto_rfc4543_free(struct aead_instance *inst)
{
	struct crypto_rfc4543_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->aead);

	kfree(inst);
}

static int crypto_rfc4543_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	struct crypto_rfc4543_instance_ctx *ctx;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	spawn = &ctx->aead;
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4543(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4543(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx);

	inst->alg.ivsize = GCM_RFC4543_IV_SIZE;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4543_init_tfm;
	inst->alg.exit = crypto_rfc4543_exit_tfm;

	inst->alg.setkey = crypto_rfc4543_setkey;
	inst->alg.setauthsize = crypto_rfc4543_setauthsize;
	inst->alg.encrypt = crypto_rfc4543_encrypt;
	inst->alg.decrypt = crypto_rfc4543_decrypt;

	inst->free = crypto_rfc4543_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_gcm_tmpls[] = {
	{
		.name = "gcm_base",
		.create = crypto_gcm_base_create,
		.module = THIS_MODULE,
	}, {
		.name = "gcm",
		.create = crypto_gcm_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4106",
		.create = crypto_rfc4106_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4543",
		.create = crypto_rfc4543_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_gcm_module_init(void)
{
	int err;

	gcm_zeroes = kzalloc(sizeof(*gcm_zeroes), GFP_KERNEL);
	if (!gcm_zeroes)
		return -ENOMEM;

	sg_init_one(&gcm_zeroes->sg, gcm_zeroes->buf, sizeof(gcm_zeroes->buf));

	err = crypto_register_templates(crypto_gcm_tmpls,
					ARRAY_SIZE(crypto_gcm_tmpls));
	if (err)
		kfree(gcm_zeroes);

	return err;
}

static void __exit crypto_gcm_module_exit(void)
{
	kfree(gcm_zeroes);
	crypto_unregister_templates(crypto_gcm_tmpls,
				    ARRAY_SIZE(crypto_gcm_tmpls));
}

subsys_initcall(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);
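
/*
 * A minimal sketch of driving "gcm(aes)" through the generic AEAD API
 * (error handling elided; key, iv, sg, assoclen and cryptlen are
 * caller-provided, iv is GCM_AES_IV_SIZE bytes, and sg must cover the
 * associated data, the payload and room for the tag):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *	int err;
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 16);
 *	aead_request_set_callback(req, 0, crypto_req_done, &wait);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, sg, sg, cryptlen, iv);
 *	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
 */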

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
MODULE_ALIAS_CRYPTO("gcm_base");
MODULE_ALIAS_CRYPTO("rfc4106");
MODULE_ALIAS_CRYPTO("rfc4543");
MODULE_ALIAS_CRYPTO("gcm");