// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include <asm/simd.h>

#include "aegis.h"

#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8
#define AEGIS128_MAX_AUTH_SIZE 16

struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};

static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);

static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};

static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (static_branch_likely(&have_simd))
		return crypto_simd_usable();
#endif
	return false;
}

bool crypto_aegis128_have_simd(void);
void crypto_aegis128_update_simd(struct aegis_state *state, const void *msg);
void crypto_aegis128_init_simd(struct aegis_state *state,
			       const union aegis_block *key,
			       const u8 *iv);
void crypto_aegis128_encrypt_chunk_simd(struct aegis_state *state, u8 *dst,
					const u8 *src, unsigned int size);
void crypto_aegis128_decrypt_chunk_simd(struct aegis_state *state, u8 *dst,
					const u8 *src, unsigned int size);
void crypto_aegis128_final_simd(struct aegis_state *state,
				union aegis_block *tag_xor,
				u64 assoclen, u64 cryptlen);

static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg)
{
	if (aegis128_do_simd()) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg)
{
	if (aegis128_do_simd()) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}

static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

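	/*
	 * Fold the Fibonacci constants into state words 3 and 4, then run
	 * ten update rounds alternating between the key and key ^ IV, as
	 * prescribed by the AEGIS-128 initialization phase.
	 */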
	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key);
		crypto_aegis128_update_a(state, &key_iv);
	}
}

static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}

static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}

static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
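			/*
			 * tmp now holds the keystream block; XORing in the
			 * ciphertext recovers the plaintext, which is then
			 * fed back into the state.
			 */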
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg);

		memcpy(dst, msg.bytes, size);
	}
}

static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf);
	}
}

static __always_inline
int crypto_aegis128_process_crypt(struct aegis_state *state,
				  struct aead_request *req,
				  struct skcipher_walk *walk,
				  void (*crypt)(struct aegis_state *state,
						u8 *dst, const u8 *src,
						unsigned int size))
{
	int err = 0;

	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
	return err;
}

static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}

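/*
 * The full AEGIS-128 tag is 16 bytes; shorter tags (down to 8 bytes) are
 * simply truncations of it, so only the requested length needs validating.
 */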
static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

static int crypto_aegis128_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	skcipher_walk_aead_encrypt(&walk, req, false);
	if (aegis128_do_simd()) {
		crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
		crypto_aegis128_process_ad(&state, req->src, req->assoclen);
		crypto_aegis128_process_crypt(&state, req, &walk,
					      crypto_aegis128_encrypt_chunk_simd);
		crypto_aegis128_final_simd(&state, &tag, req->assoclen,
					   cryptlen);
	} else {
		crypto_aegis128_init(&state, &ctx->key, req->iv);
		crypto_aegis128_process_ad(&state, req->src, req->assoclen);
		crypto_aegis128_process_crypt(&state, req, &walk,
					      crypto_aegis128_encrypt_chunk);
		crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);
	}

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

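/*
 * Decryption recomputes the tag over the recovered plaintext;
 * crypto_aegis128_final() XORs that value into the tag read from the
 * source scatterlist, so a matching tag leaves only zero bytes behind.
 * crypto_memneq() then performs the comparison in constant time.
 */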
static int crypto_aegis128_decrypt(struct aead_request *req)
{
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	if (aegis128_do_simd()) {
		crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
		crypto_aegis128_process_ad(&state, req->src, req->assoclen);
		crypto_aegis128_process_crypt(&state, req, &walk,
					      crypto_aegis128_decrypt_chunk_simd);
		crypto_aegis128_final_simd(&state, &tag, req->assoclen,
					   cryptlen);
	} else {
		crypto_aegis128_init(&state, &ctx->key, req->iv);
		crypto_aegis128_process_ad(&state, req->src, req->assoclen);
		crypto_aegis128_process_crypt(&state, req, &walk,
					      crypto_aegis128_decrypt_chunk);
		crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);
	}

	return crypto_memneq(tag.bytes, zeros, authsize) ? -EBADMSG : 0;
}

static struct aead_alg crypto_aegis128_alg = {
	.setkey = crypto_aegis128_setkey,
	.setauthsize = crypto_aegis128_setauthsize,
	.encrypt = crypto_aegis128_encrypt,
	.decrypt = crypto_aegis128_decrypt,

	.ivsize = AEGIS128_NONCE_SIZE,
	.maxauthsize = AEGIS128_MAX_AUTH_SIZE,
	.chunksize = AEGIS_BLOCK_SIZE,

	.base = {
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct aegis_ctx),
		.cra_alignmask = 0,

		.cra_priority = 100,

		.cra_name = "aegis128",
		.cra_driver_name = "aegis128-generic",

		.cra_module = THIS_MODULE,
	}
};

static int __init crypto_aegis128_module_init(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd())
		static_branch_enable(&have_simd);

	return crypto_register_aead(&crypto_aegis128_alg);
}

static void __exit crypto_aegis128_module_exit(void)
{
	crypto_unregister_aead(&crypto_aegis128_alg);
}

subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");
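
/*
 * Example (a sketch, not part of this file): how other kernel code might
 * drive this AEAD through the generic crypto API. Error handling and
 * scatterlist setup are elided; "key", "iv", "src_sg", "dst_sg",
 * "cryptlen" and "assoclen" are assumed to exist in the caller.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("aegis128", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_aead_setkey(tfm, key, AEGIS128_KEY_SIZE);
 *	crypto_aead_setauthsize(tfm, AEGIS128_MAX_AUTH_SIZE);
 *	aead_request_set_callback(req, 0, NULL, NULL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	crypto_aead_encrypt(req);	(tag is written after the ciphertext)
 *
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */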