// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * TEA, XTEA, and XETA crypto algorithms
 *
 * The TEA and Xtended TEA algorithms were developed by David Wheeler
 * and Roger Needham at the Computer Laboratory of Cambridge University.
 *
 * Due to the order of evaluation in XTEA many people have incorrectly
 * implemented it. XETA (XTEA in the wrong order) exists for
 * compatibility with these implementations.
 *
 * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com
 */

#include <crypto/algapi.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <asm/byteorder.h>
#include <linux/types.h>

#define TEA_KEY_SIZE		16
#define TEA_BLOCK_SIZE		8
#define TEA_ROUNDS		32
#define TEA_DELTA		0x9e3779b9

#define XTEA_KEY_SIZE		16
#define XTEA_BLOCK_SIZE		8
#define XTEA_ROUNDS		32
#define XTEA_DELTA		0x9e3779b9

struct tea_ctx {
	u32 KEY[4];
};

struct xtea_ctx {
	u32 KEY[4];
};

static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	/* The 128-bit key is taken as four little-endian 32-bit words. */
	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

/* Encrypt one 8-byte block with the classic 32-round TEA schedule. */
static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum = 0;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	n = TEA_ROUNDS;

	while (n-- > 0) {
		sum += TEA_DELTA;
		y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	/* Start from the final encryption sum, TEA_DELTA * TEA_ROUNDS. */
	sum = TEA_DELTA << 5;

	n = TEA_ROUNDS;

	while (n-- > 0) {
		z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
		y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		sum -= TEA_DELTA;
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
		sum += XTEA_DELTA;
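		/*
		 * Key schedule: the half-round above indexes the key with
		 * bits 0-1 of sum, the half-round below with bits 11-12 of
		 * the incremented sum, as in the published XTEA reference
		 * code.
		 */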
		z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
		sum -= XTEA_DELTA;
		y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

/*
 * XETA: XTEA with the additions and XORs in the round function grouped
 * in the wrong order.  Kept only for compatibility with implementations
 * that made this mistake.
 */
static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
		sum += XTEA_DELTA;
		z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
		sum -= XTEA_DELTA;
		y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static struct crypto_alg tea_algs[3] = { {
	.cra_name		= "tea",
	.cra_driver_name	= "tea-generic",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= TEA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct tea_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u			= { .cipher = {
		.cia_min_keysize	= TEA_KEY_SIZE,
		.cia_max_keysize	= TEA_KEY_SIZE,
		.cia_setkey		= tea_setkey,
		.cia_encrypt		= tea_encrypt,
		.cia_decrypt		= tea_decrypt } }
}, {
	.cra_name		= "xtea",
	.cra_driver_name	= "xtea-generic",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= XTEA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct xtea_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u			= { .cipher = {
		.cia_min_keysize	= XTEA_KEY_SIZE,
		.cia_max_keysize	= XTEA_KEY_SIZE,
		.cia_setkey		= xtea_setkey,
		.cia_encrypt		= xtea_encrypt,
		.cia_decrypt		= xtea_decrypt } }
}, {
	.cra_name		= "xeta",
	.cra_driver_name	= "xeta-generic",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= XTEA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct xtea_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u			= { .cipher = {
		.cia_min_keysize	= XTEA_KEY_SIZE,
		.cia_max_keysize	= XTEA_KEY_SIZE,
		.cia_setkey		= xtea_setkey,
		.cia_encrypt		= xeta_encrypt,
		.cia_decrypt		= xeta_decrypt } }
} };

static int __init tea_mod_init(void)
{
	return crypto_register_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

static void __exit tea_mod_fini(void)
{
	crypto_unregister_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

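/*
 * Allow the module to be autoloaded when any of the three algorithms is
 * requested by name through the crypto API.
 */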
MODULE_ALIAS_CRYPTO("tea");
MODULE_ALIAS_CRYPTO("xtea");
MODULE_ALIAS_CRYPTO("xeta");

subsys_initcall(tea_mod_init);
module_exit(tea_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");
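/*
 * Usage note (illustrative sketch only): a kernel-side consumer could
 * reach these single-block ciphers by name through the crypto cipher
 * interface, roughly as below.  "key", "plaintext", "ciphertext" and
 * "recovered" stand for caller-supplied buffers of the appropriate
 * sizes; in recent kernels this interface is treated as internal to the
 * crypto layer, so headers and availability may vary.
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("xtea", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_cipher_setkey(tfm, key, XTEA_KEY_SIZE);
 *	crypto_cipher_encrypt_one(tfm, ciphertext, plaintext);
 *	crypto_cipher_decrypt_one(tfm, recovered, ciphertext);
 *	crypto_free_cipher(tfm);
 */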