/*
 * Cryptographic API.
 *
 * TEA, XTEA, and XETA crypto algorithms
 *
 * The TEA and Xtended TEA algorithms were developed by David Wheeler
 * and Roger Needham at the Computer Laboratory of Cambridge University.
 *
 * Due to the order of evaluation in XTEA, many people have implemented
 * it incorrectly. XETA (XTEA in the wrong order) exists for
 * compatibility with these implementations.
 *
 * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <asm/byteorder.h>
#include <linux/crypto.h>
#include <linux/types.h>

#define TEA_KEY_SIZE    16
#define TEA_BLOCK_SIZE  8
#define TEA_ROUNDS      32
#define TEA_DELTA       0x9e3779b9

#define XTEA_KEY_SIZE   16
#define XTEA_BLOCK_SIZE 8
#define XTEA_ROUNDS     32
#define XTEA_DELTA      0x9e3779b9

struct tea_ctx {
        u32 KEY[4];
};

struct xtea_ctx {
        u32 KEY[4];
};

static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
                      unsigned int key_len)
{
        struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *key = (const __le32 *)in_key;

        /* The crypto core has already checked key_len against
         * cia_min_keysize/cia_max_keysize, both TEA_KEY_SIZE. */
        ctx->KEY[0] = le32_to_cpu(key[0]);
        ctx->KEY[1] = le32_to_cpu(key[1]);
        ctx->KEY[2] = le32_to_cpu(key[2]);
        ctx->KEY[3] = le32_to_cpu(key[3]);

        return 0;
}

static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        u32 y, z, n, sum = 0;
        u32 k0, k1, k2, k3;
        struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *in = (const __le32 *)src;
        __le32 *out = (__le32 *)dst;

        y = le32_to_cpu(in[0]);
        z = le32_to_cpu(in[1]);

        k0 = ctx->KEY[0];
        k1 = ctx->KEY[1];
        k2 = ctx->KEY[2];
        k3 = ctx->KEY[3];

        n = TEA_ROUNDS;

        while (n-- > 0) {
                sum += TEA_DELTA;
                y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
                z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
        }

        out[0] = cpu_to_le32(y);
        out[1] = cpu_to_le32(z);
}

static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        u32 y, z, n, sum;
        u32 k0, k1, k2, k3;
        struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *in = (const __le32 *)src;
        __le32 *out = (__le32 *)dst;

        y = le32_to_cpu(in[0]);
        z = le32_to_cpu(in[1]);

        k0 = ctx->KEY[0];
        k1 = ctx->KEY[1];
        k2 = ctx->KEY[2];
        k3 = ctx->KEY[3];

        /* Start from the final encrypt-side sum: TEA_DELTA << 5 ==
         * TEA_DELTA * TEA_ROUNDS (mod 2^32), since TEA_ROUNDS == 32. */
        sum = TEA_DELTA << 5;

        n = TEA_ROUNDS;

        while (n-- > 0) {
                z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
                y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
                sum -= TEA_DELTA;
        }

        out[0] = cpu_to_le32(y);
        out[1] = cpu_to_le32(z);
}
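/*
 * Example (illustration only, not compiled here): a minimal userspace
 * sketch that mirrors the round structure of tea_encrypt() and
 * tea_decrypt() above on host-order words and checks that decryption
 * inverts encryption. The demo_* names and the sample key/block values
 * are hypothetical.
 *
 *      #include <stdint.h>
 *      #include <stdio.h>
 *
 *      static void demo_tea_encrypt(uint32_t v[2], const uint32_t k[4])
 *      {
 *              uint32_t y = v[0], z = v[1], sum = 0;
 *              int n;
 *
 *              for (n = 0; n < 32; n++) {
 *                      sum += 0x9e3779b9;
 *                      y += ((z << 4) + k[0]) ^ (z + sum) ^ ((z >> 5) + k[1]);
 *                      z += ((y << 4) + k[2]) ^ (y + sum) ^ ((y >> 5) + k[3]);
 *              }
 *              v[0] = y;
 *              v[1] = z;
 *      }
 *
 *      static void demo_tea_decrypt(uint32_t v[2], const uint32_t k[4])
 *      {
 *              uint32_t y = v[0], z = v[1], sum = (uint32_t)0x9e3779b9 << 5;
 *              int n;
 *
 *              for (n = 0; n < 32; n++) {
 *                      z -= ((y << 4) + k[2]) ^ (y + sum) ^ ((y >> 5) + k[3]);
 *                      y -= ((z << 4) + k[0]) ^ (z + sum) ^ ((z >> 5) + k[1]);
 *                      sum -= 0x9e3779b9;
 *              }
 *              v[0] = y;
 *              v[1] = z;
 *      }
 *
 *      int main(void)
 *      {
 *              uint32_t k[4] = { 1, 2, 3, 4 };
 *              uint32_t v[2] = { 0xdeadbeef, 0x01234567 };
 *
 *              demo_tea_encrypt(v, k);
 *              demo_tea_decrypt(v, k);
 *              printf("%08x %08x\n", v[0], v[1]);
 *              return 0;
 *      }
 *
 * Since decryption inverts encryption, this prints the original block.
 */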
static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *key = (const __le32 *)in_key;

        ctx->KEY[0] = le32_to_cpu(key[0]);
        ctx->KEY[1] = le32_to_cpu(key[1]);
        ctx->KEY[2] = le32_to_cpu(key[2]);
        ctx->KEY[3] = le32_to_cpu(key[3]);

        return 0;
}

static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        u32 y, z, sum = 0;
        u32 limit = XTEA_DELTA * XTEA_ROUNDS;
        struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *in = (const __le32 *)src;
        __le32 *out = (__le32 *)dst;

        y = le32_to_cpu(in[0]);
        z = le32_to_cpu(in[1]);

        while (sum != limit) {
                y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
                sum += XTEA_DELTA;
                z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
        }

        out[0] = cpu_to_le32(y);
        out[1] = cpu_to_le32(z);
}

static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        u32 y, z, sum;
        struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *in = (const __le32 *)src;
        __le32 *out = (__le32 *)dst;

        y = le32_to_cpu(in[0]);
        z = le32_to_cpu(in[1]);

        sum = XTEA_DELTA * XTEA_ROUNDS;

        while (sum) {
                z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
                sum -= XTEA_DELTA;
                y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
        }

        out[0] = cpu_to_le32(y);
        out[1] = cpu_to_le32(z);
}

static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        u32 y, z, sum = 0;
        u32 limit = XTEA_DELTA * XTEA_ROUNDS;
        struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *in = (const __le32 *)src;
        __le32 *out = (__le32 *)dst;

        y = le32_to_cpu(in[0]);
        z = le32_to_cpu(in[1]);

        /* Note the misplaced parentheses relative to xtea_encrypt():
         * this is the widespread incorrect ordering kept as "xeta". */
        while (sum != limit) {
                y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
                sum += XTEA_DELTA;
                z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
        }

        out[0] = cpu_to_le32(y);
        out[1] = cpu_to_le32(z);
}

static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        u32 y, z, sum;
        struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
        const __le32 *in = (const __le32 *)src;
        __le32 *out = (__le32 *)dst;

        y = le32_to_cpu(in[0]);
        z = le32_to_cpu(in[1]);

        sum = XTEA_DELTA * XTEA_ROUNDS;

        while (sum) {
                z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
                sum -= XTEA_DELTA;
                y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
        }

        out[0] = cpu_to_le32(y);
        out[1] = cpu_to_le32(z);
}
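/*
 * The only difference between xtea_encrypt() and xeta_encrypt() above
 * is the parenthesization of the round function: XTEA computes
 * ((z << 4 ^ z >> 5) + z) ^ (sum + key), whereas the common incorrect
 * ordering preserved as XETA computes
 * (z << 4 ^ z >> 5) + (z ^ sum) + key. A minimal userspace sketch
 * (illustration only; the sample values are arbitrary) showing that the
 * two orderings already diverge on a single half-round:
 *
 *      #include <stdint.h>
 *      #include <stdio.h>
 *
 *      int main(void)
 *      {
 *              uint32_t z = 0x01234567, sum = 0x9e3779b9, key = 0x89abcdef;
 *              uint32_t mix = z << 4 ^ z >> 5;
 *              uint32_t xtea = (mix + z) ^ (sum + key);
 *              uint32_t xeta = mix + (z ^ sum) + key;
 *
 *              printf("xtea %08x xeta %08x\n", xtea, xeta);
 *              return 0;
 *      }
 *
 * The two printed words differ, which is why XTEA and XETA ciphertexts
 * are mutually incompatible even under the same key.
 */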
static struct crypto_alg tea_alg = {
        .cra_name = "tea",
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = TEA_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct tea_ctx),
        .cra_alignmask = 3,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(tea_alg.cra_list),
        .cra_u = { .cipher = {
        .cia_min_keysize = TEA_KEY_SIZE,
        .cia_max_keysize = TEA_KEY_SIZE,
        .cia_setkey = tea_setkey,
        .cia_encrypt = tea_encrypt,
        .cia_decrypt = tea_decrypt } }
};

static struct crypto_alg xtea_alg = {
        .cra_name = "xtea",
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = XTEA_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct xtea_ctx),
        .cra_alignmask = 3,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(xtea_alg.cra_list),
        .cra_u = { .cipher = {
        .cia_min_keysize = XTEA_KEY_SIZE,
        .cia_max_keysize = XTEA_KEY_SIZE,
        .cia_setkey = xtea_setkey,
        .cia_encrypt = xtea_encrypt,
        .cia_decrypt = xtea_decrypt } }
};

static struct crypto_alg xeta_alg = {
        .cra_name = "xeta",
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = XTEA_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct xtea_ctx),
        .cra_alignmask = 3,
        .cra_module = THIS_MODULE,
        /* Fixed: this must reference xeta_alg's own list head, not
         * xtea_alg's. */
        .cra_list = LIST_HEAD_INIT(xeta_alg.cra_list),
        .cra_u = { .cipher = {
        .cia_min_keysize = XTEA_KEY_SIZE,
        .cia_max_keysize = XTEA_KEY_SIZE,
        .cia_setkey = xtea_setkey,
        .cia_encrypt = xeta_encrypt,
        .cia_decrypt = xeta_decrypt } }
};

static int __init init(void)
{
        int ret;

        ret = crypto_register_alg(&tea_alg);
        if (ret < 0)
                goto out;

        ret = crypto_register_alg(&xtea_alg);
        if (ret < 0) {
                crypto_unregister_alg(&tea_alg);
                goto out;
        }

        ret = crypto_register_alg(&xeta_alg);
        if (ret < 0) {
                crypto_unregister_alg(&tea_alg);
                crypto_unregister_alg(&xtea_alg);
                goto out;
        }

out:
        return ret;
}

static void __exit fini(void)
{
        crypto_unregister_alg(&tea_alg);
        crypto_unregister_alg(&xtea_alg);
        crypto_unregister_alg(&xeta_alg);
}

MODULE_ALIAS("xtea");
MODULE_ALIAS("xeta");

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");
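/*
 * In-kernel usage sketch, assuming a kernel that exposes the
 * single-block crypto_cipher helpers (crypto_alloc_cipher(),
 * crypto_cipher_setkey(), crypto_cipher_encrypt_one()); older trees of
 * this code's vintage use the crypto_alloc_tfm() interface instead.
 * Illustration only:
 *
 *      struct crypto_cipher *tfm = crypto_alloc_cipher("tea", 0, 0);
 *      u8 key[TEA_KEY_SIZE] = { 0 };
 *      u8 in[TEA_BLOCK_SIZE] = { 0 }, out[TEA_BLOCK_SIZE];
 *      int err;
 *
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      err = crypto_cipher_setkey(tfm, key, sizeof(key));
 *      if (!err)
 *              crypto_cipher_encrypt_one(tfm, out, in);
 *      crypto_free_cipher(tfm);
 */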