// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include <asm/simd.h>

#include "aegis.h"

#define AEGIS128_NONCE_SIZE		16
#define AEGIS128_STATE_BLOCKS		5
#define AEGIS128_KEY_SIZE		16
#define AEGIS128_MIN_AUTH_SIZE		8
#define AEGIS128_MAX_AUTH_SIZE		16

struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};

static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);

static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};

static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (static_branch_likely(&have_simd))
		return crypto_simd_usable();
#endif
	return false;
}

bool crypto_aegis128_have_simd(void);
void crypto_aegis128_update_simd(struct aegis_state *state, const void *msg);
void crypto_aegis128_init_simd(struct aegis_state *state,
			       const union aegis_block *key,
			       const u8 *iv);
void crypto_aegis128_encrypt_chunk_simd(struct aegis_state *state, u8 *dst,
					const u8 *src, unsigned int size);
void crypto_aegis128_decrypt_chunk_simd(struct aegis_state *state, u8 *dst,
					const u8 *src, unsigned int size);
int crypto_aegis128_final_simd(struct aegis_state *state,
			       union aegis_block *tag_xor,
			       unsigned int assoclen,
			       unsigned int cryptlen,
			       unsigned int authsize);

static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}
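
/*
 * Initialization per the AEGIS-128 specification: the five state blocks
 * are seeded with key ^ IV, the two Fibonacci-derived constants, and
 * the key XORed with each constant, then mixed for ten update rounds
 * that alternate the key and key ^ IV as message input.
 */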
static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key, false);
		crypto_aegis128_update_a(state, &key_iv, false);
	}
}

static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size,
			       bool do_simd)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}

static void crypto_aegis128_wipe_chunk(struct aegis_state *state, u8 *dst,
				       const u8 *src, unsigned int size)
{
	memzero_explicit(dst, size);
}

static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg, false);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}
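
/*
 * Decryption mirrors crypto_aegis128_encrypt_chunk(), except that the
 * state must absorb the recovered plaintext rather than the ciphertext,
 * and a trailing partial block is zero-padded before the state update
 * so that stray bytes past the message end cannot influence the
 * keystream.
 */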
static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg, false);

		memcpy(dst, msg.bytes, size);
	}
}

static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen,
				       bool do_simd)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf, do_simd);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left, do_simd);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf, do_simd);
	}
}

static __always_inline
int crypto_aegis128_process_crypt(struct aegis_state *state,
				  struct skcipher_walk *walk,
				  void (*crypt)(struct aegis_state *state,
						u8 *dst, const u8 *src,
						unsigned int size))
{
	int err = 0;

	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
	return err;
}
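
/*
 * Finalization per the AEGIS-128 specification: the associated-data and
 * message lengths (in bits, as little-endian 64-bit words) are XORed
 * with state block 3 and absorbed over seven update rounds; the tag is
 * then the XOR of all five state blocks, folded into @tag_xor.
 */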
static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp, false);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}

static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

static int crypto_aegis128_encrypt_generic(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}
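
/*
 * crypto_aegis128_final() XORs the computed tag into @tag_xor, so when
 * the expected tag copied from the source scatterlist is authentic the
 * result is all zeros; crypto_memneq() then performs the comparison in
 * constant time.
 */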
static int crypto_aegis128_decrypt_generic(struct aead_request *req)
{
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	if (unlikely(crypto_memneq(tag.bytes, zeros, authsize))) {
		/*
		 * From Chapter 4. 'Security Analysis' of the AEGIS spec [0]
		 *
		 * "3. If verification fails, the decrypted plaintext and the
		 * wrong authentication tag should not be given as output."
		 *
		 * [0] https://competitions.cr.yp.to/round3/aegisv11.pdf
		 */
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		memzero_explicit(&tag, sizeof(tag));
		return -EBADMSG;
	}
	return 0;
}

static int crypto_aegis128_encrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_encrypt_generic(req);

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk_simd);
	crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

static int crypto_aegis128_decrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_decrypt_generic(req);

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk_simd);

	if (unlikely(crypto_aegis128_final_simd(&state, &tag, req->assoclen,
						cryptlen, authsize))) {
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		return -EBADMSG;
	}
	return 0;
}
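
/*
 * Both instances register under the name "aegis128"; the SIMD variant
 * uses a higher cra_priority (200 vs. 100), so the crypto API prefers
 * it whenever it was detected at module init.
 */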
static struct aead_alg crypto_aegis128_alg_generic = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_generic,
	.decrypt		= crypto_aegis128_decrypt_generic,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 100,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-generic",
	.base.cra_module	= THIS_MODULE,
};

static struct aead_alg crypto_aegis128_alg_simd = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_simd,
	.decrypt		= crypto_aegis128_decrypt_simd,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 200,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-simd",
	.base.cra_module	= THIS_MODULE,
};

static int __init crypto_aegis128_module_init(void)
{
	int ret;

	ret = crypto_register_aead(&crypto_aegis128_alg_generic);
	if (ret)
		return ret;

	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd()) {
		ret = crypto_register_aead(&crypto_aegis128_alg_simd);
		if (ret) {
			crypto_unregister_aead(&crypto_aegis128_alg_generic);
			return ret;
		}
		static_branch_enable(&have_simd);
	}
	return 0;
}

static void __exit crypto_aegis128_module_exit(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd())
		crypto_unregister_aead(&crypto_aegis128_alg_simd);

	crypto_unregister_aead(&crypto_aegis128_alg_generic);
}

subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");
MODULE_ALIAS_CRYPTO("aegis128-simd");
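
/*
 * Usage sketch (illustrative only, not part of this file): a kernel
 * consumer would go through the generic AEAD API, e.g.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("aegis128", 0, 0);
 *
 *	crypto_aead_setkey(tfm, key, AEGIS128_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, AEGIS128_MAX_AUTH_SIZE);
 *
 * followed by aead_request_alloc()/aead_request_set_crypt() and
 * crypto_aead_encrypt(); error handling omitted for brevity.
 */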