// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * ARIA Cipher Algorithm.
 *
 * Documentation of ARIA can be found in RFC 5794.
 * Copyright (c) 2022 Taehee Yoo <ap420073@gmail.com>
 *
 * Information for ARIA
 *     http://210.104.33.10/ARIA/index-e.html (English)
 *     http://seed.kisa.or.kr/ (Korean)
 *
 * Public domain version is distributed above.
 */

#include <crypto/aria.h>

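/*
 * Key-schedule round constants: the three 128-bit constants C1, C2 and C3
 * from RFC 5794, stored twice in a row.  The key schedule reads three
 * consecutive constants starting at word offset (key_len - 16) / 2, i.e.
 * 0, 4 or 8 for 128-, 192- and 256-bit keys, giving the per-key-size
 * orderings (C1,C2,C3), (C2,C3,C1) and (C3,C1,C2) without wrapping.
 */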
static const u32 key_rc[20] = {
        0x517cc1b7, 0x27220a94, 0xfe13abe8, 0xfa9a6ee0,
        0x6db14acc, 0x9e21c820, 0xff28b1d5, 0xef5de2b0,
        0xdb92371d, 0x2126e970, 0x03249775, 0x04e8c90e,
        0x517cc1b7, 0x27220a94, 0xfe13abe8, 0xfa9a6ee0,
        0x6db14acc, 0x9e21c820, 0xff28b1d5, 0xef5de2b0
};

/*
 * Expand the user key into the encryption round keys (RFC 5794 key schedule).
 * The intermediate values W0..W3 are derived from the key and the round
 * constants; each round key then combines two of them rotated by 19, 31, 67,
 * 97 or 109 bits via aria_gsrk().
 */
static void aria_set_encrypt_key(struct aria_ctx *ctx, const u8 *in_key,
                                 unsigned int key_len)
{
        const __be32 *key = (const __be32 *)in_key;
        u32 w0[4], w1[4], w2[4], w3[4];
        u32 reg0, reg1, reg2, reg3;
        const u32 *ck;
        int rkidx = 0;

        ck = &key_rc[(key_len - 16) / 2];

        w0[0] = be32_to_cpu(key[0]);
        w0[1] = be32_to_cpu(key[1]);
        w0[2] = be32_to_cpu(key[2]);
        w0[3] = be32_to_cpu(key[3]);

        reg0 = w0[0] ^ ck[0];
        reg1 = w0[1] ^ ck[1];
        reg2 = w0[2] ^ ck[2];
        reg3 = w0[3] ^ ck[3];

        aria_subst_diff_odd(&reg0, &reg1, &reg2, &reg3);

        if (key_len > 16) {
                w1[0] = be32_to_cpu(key[4]);
                w1[1] = be32_to_cpu(key[5]);
                if (key_len > 24) {
                        w1[2] = be32_to_cpu(key[6]);
                        w1[3] = be32_to_cpu(key[7]);
                } else {
                        w1[2] = 0;
                        w1[3] = 0;
                }
        } else {
                w1[0] = 0;
                w1[1] = 0;
                w1[2] = 0;
                w1[3] = 0;
        }

        w1[0] ^= reg0;
        w1[1] ^= reg1;
        w1[2] ^= reg2;
        w1[3] ^= reg3;

        reg0 = w1[0];
        reg1 = w1[1];
        reg2 = w1[2];
        reg3 = w1[3];

        reg0 ^= ck[4];
        reg1 ^= ck[5];
        reg2 ^= ck[6];
        reg3 ^= ck[7];

        aria_subst_diff_even(&reg0, &reg1, &reg2, &reg3);

        reg0 ^= w0[0];
        reg1 ^= w0[1];
        reg2 ^= w0[2];
        reg3 ^= w0[3];

        w2[0] = reg0;
        w2[1] = reg1;
        w2[2] = reg2;
        w2[3] = reg3;

        reg0 ^= ck[8];
        reg1 ^= ck[9];
        reg2 ^= ck[10];
        reg3 ^= ck[11];

        aria_subst_diff_odd(&reg0, &reg1, &reg2, &reg3);

        w3[0] = reg0 ^ w1[0];
        w3[1] = reg1 ^ w1[1];
        w3[2] = reg2 ^ w1[2];
        w3[3] = reg3 ^ w1[3];

        aria_gsrk(ctx->enc_key[rkidx], w0, w1, 19);
        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w1, w2, 19);
        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w2, w3, 19);
        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w3, w0, 19);

        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w0, w1, 31);
        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w1, w2, 31);
        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w2, w3, 31);
        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w3, w0, 31);

        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w0, w1, 67);
        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w1, w2, 67);
        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w2, w3, 67);
        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w3, w0, 67);

        rkidx++;
        aria_gsrk(ctx->enc_key[rkidx], w0, w1, 97);
        if (key_len > 16) {
                rkidx++;
                aria_gsrk(ctx->enc_key[rkidx], w1, w2, 97);
                rkidx++;
                aria_gsrk(ctx->enc_key[rkidx], w2, w3, 97);

                if (key_len > 24) {
                        rkidx++;
                        aria_gsrk(ctx->enc_key[rkidx], w3, w0, 97);

                        rkidx++;
                        aria_gsrk(ctx->enc_key[rkidx], w0, w1, 109);
                }
        }
}

/*
 * The decryption round keys are the encryption round keys in reverse order,
 * with the diffusion function applied to every round key except the first
 * and the last.
 */
static void aria_set_decrypt_key(struct aria_ctx *ctx)
{
        int i;

        for (i = 0; i < 4; i++) {
                ctx->dec_key[0][i] = ctx->enc_key[ctx->rounds][i];
                ctx->dec_key[ctx->rounds][i] = ctx->enc_key[0][i];
        }

        for (i = 1; i < ctx->rounds; i++) {
                ctx->dec_key[i][0] = aria_m(ctx->enc_key[ctx->rounds - i][0]);
                ctx->dec_key[i][1] = aria_m(ctx->enc_key[ctx->rounds - i][1]);
                ctx->dec_key[i][2] = aria_m(ctx->enc_key[ctx->rounds - i][2]);
                ctx->dec_key[i][3] = aria_m(ctx->enc_key[ctx->rounds - i][3]);

                aria_diff_word(&ctx->dec_key[i][0], &ctx->dec_key[i][1],
                               &ctx->dec_key[i][2], &ctx->dec_key[i][3]);
                aria_diff_byte(&ctx->dec_key[i][1],
                               &ctx->dec_key[i][2], &ctx->dec_key[i][3]);
                aria_diff_word(&ctx->dec_key[i][0], &ctx->dec_key[i][1],
                               &ctx->dec_key[i][2], &ctx->dec_key[i][3]);
        }
}

int aria_set_key(struct crypto_tfm *tfm, const u8 *in_key, unsigned int key_len)
{
        struct aria_ctx *ctx = crypto_tfm_ctx(tfm);

        if (key_len != 16 && key_len != 24 && key_len != 32)
                return -EINVAL;

        ctx->key_length = key_len;
        ctx->rounds = (key_len + 32) / 4;       /* 12, 14 or 16 rounds */

        aria_set_encrypt_key(ctx, in_key, key_len);
        aria_set_decrypt_key(ctx);

        return 0;
}
EXPORT_SYMBOL_GPL(aria_set_key);

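/*
 * Shared encryption/decryption body.  ARIA is an involutional SPN, so the
 * same round function serves both directions; only the round keys passed in
 * (ctx->enc_key or ctx->dec_key) differ.
 */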
static void __aria_crypt(struct aria_ctx *ctx, u8 *out, const u8 *in,
                         u32 key[][ARIA_RD_KEY_WORDS])
{
        const __be32 *src = (const __be32 *)in;
        __be32 *dst = (__be32 *)out;
        u32 reg0, reg1, reg2, reg3;
        int rounds, rkidx = 0;

        rounds = ctx->rounds;

        reg0 = be32_to_cpu(src[0]);
        reg1 = be32_to_cpu(src[1]);
        reg2 = be32_to_cpu(src[2]);
        reg3 = be32_to_cpu(src[3]);

        aria_add_round_key(key[rkidx], &reg0, &reg1, &reg2, &reg3);
        rkidx++;

        aria_subst_diff_odd(&reg0, &reg1, &reg2, &reg3);
        aria_add_round_key(key[rkidx], &reg0, &reg1, &reg2, &reg3);
        rkidx++;

        while ((rounds -= 2) > 0) {
                aria_subst_diff_even(&reg0, &reg1, &reg2, &reg3);
                aria_add_round_key(key[rkidx], &reg0, &reg1, &reg2, &reg3);
                rkidx++;

                aria_subst_diff_odd(&reg0, &reg1, &reg2, &reg3);
                aria_add_round_key(key[rkidx], &reg0, &reg1, &reg2, &reg3);
                rkidx++;
        }

        /* Last round: substitution layer and final round-key addition, no diffusion. */
        reg0 = key[rkidx][0] ^ make_u32((u8)(x1[get_u8(reg0, 0)]),
                                        (u8)(x2[get_u8(reg0, 1)] >> 8),
                                        (u8)(s1[get_u8(reg0, 2)]),
                                        (u8)(s2[get_u8(reg0, 3)]));
        reg1 = key[rkidx][1] ^ make_u32((u8)(x1[get_u8(reg1, 0)]),
                                        (u8)(x2[get_u8(reg1, 1)] >> 8),
                                        (u8)(s1[get_u8(reg1, 2)]),
                                        (u8)(s2[get_u8(reg1, 3)]));
        reg2 = key[rkidx][2] ^ make_u32((u8)(x1[get_u8(reg2, 0)]),
                                        (u8)(x2[get_u8(reg2, 1)] >> 8),
                                        (u8)(s1[get_u8(reg2, 2)]),
                                        (u8)(s2[get_u8(reg2, 3)]));
        reg3 = key[rkidx][3] ^ make_u32((u8)(x1[get_u8(reg3, 0)]),
                                        (u8)(x2[get_u8(reg3, 1)] >> 8),
                                        (u8)(s1[get_u8(reg3, 2)]),
                                        (u8)(s2[get_u8(reg3, 3)]));

        dst[0] = cpu_to_be32(reg0);
        dst[1] = cpu_to_be32(reg1);
        dst[2] = cpu_to_be32(reg2);
        dst[3] = cpu_to_be32(reg3);
}

void aria_encrypt(void *_ctx, u8 *out, const u8 *in)
{
        struct aria_ctx *ctx = (struct aria_ctx *)_ctx;

        __aria_crypt(ctx, out, in, ctx->enc_key);
}
EXPORT_SYMBOL_GPL(aria_encrypt);

void aria_decrypt(void *_ctx, u8 *out, const u8 *in)
{
        struct aria_ctx *ctx = (struct aria_ctx *)_ctx;

        __aria_crypt(ctx, out, in, ctx->dec_key);
}
EXPORT_SYMBOL_GPL(aria_decrypt);

static void __aria_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct aria_ctx *ctx = crypto_tfm_ctx(tfm);

        __aria_crypt(ctx, out, in, ctx->enc_key);
}

static void __aria_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
        struct aria_ctx *ctx = crypto_tfm_ctx(tfm);

        __aria_crypt(ctx, out, in, ctx->dec_key);
}

static struct crypto_alg aria_alg = {
        .cra_name               = "aria",
        .cra_driver_name        = "aria-generic",
        .cra_priority           = 100,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = ARIA_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct aria_ctx),
        .cra_alignmask          = 3,
        .cra_module             = THIS_MODULE,
        .cra_u                  = {
                .cipher = {
                        .cia_min_keysize        = ARIA_MIN_KEY_SIZE,
                        .cia_max_keysize        = ARIA_MAX_KEY_SIZE,
                        .cia_setkey             = aria_set_key,
                        .cia_encrypt            = __aria_encrypt,
                        .cia_decrypt            = __aria_decrypt
                }
        }
};

static int __init aria_init(void)
{
        return crypto_register_alg(&aria_alg);
}

static void __exit aria_fini(void)
{
        crypto_unregister_alg(&aria_alg);
}

subsys_initcall(aria_init);
module_exit(aria_fini);

MODULE_DESCRIPTION("ARIA Cipher Algorithm");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Taehee Yoo <ap420073@gmail.com>");
MODULE_ALIAS_CRYPTO("aria");
MODULE_ALIAS_CRYPTO("aria-generic");
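
/*
 * Example usage (illustrative sketch, not part of this module): once the
 * "aria" algorithm is registered, a caller can process single 16-byte blocks
 * through the kernel crypto API roughly as follows.  "key", "in" and "err"
 * are caller-provided; on recent kernels the single-block cipher interface
 * is declared in <crypto/internal/cipher.h> and the calling module may need
 * to import the CRYPTO_INTERNAL symbol namespace.
 *
 *	struct crypto_cipher *tfm;
 *	u8 ct[ARIA_BLOCK_SIZE], pt[ARIA_BLOCK_SIZE];
 *
 *	tfm = crypto_alloc_cipher("aria", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_cipher_setkey(tfm, key, 16);   (key may be 16, 24 or 32 bytes)
 *	if (!err) {
 *		crypto_cipher_encrypt_one(tfm, ct, in);
 *		crypto_cipher_decrypt_one(tfm, pt, ct);   (pt should equal in)
 *	}
 *	crypto_free_cipher(tfm);
 */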