/*
 * Copyright (C) 2006 USAGI/WIDE Project
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 *
 * Author:
 *	Kazunori Miyazawa <miyazawa@linux-ipv6.org>
 */

#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>

/*
 * The three constant input blocks of XCBC (cf. RFC 3566): encrypting them
 * under the user-supplied key yields the derived keys K1, K2 and K3.
 */
static u_int32_t ks[12] = {0x01010101, 0x01010101, 0x01010101, 0x01010101,
			   0x02020202, 0x02020202, 0x02020202, 0x02020202,
			   0x03030303, 0x03030303, 0x03030303, 0x03030303};

/*
 * +------------------------
 * | <parent tfm>
 * +------------------------
 * | xcbc_tfm_ctx
 * +------------------------
 * | consts (block size * 2)
 * +------------------------
 */
struct xcbc_tfm_ctx {
	struct crypto_cipher *child;
	u8 ctx[];
};

/*
 * +------------------------
 * | <shash desc>
 * +------------------------
 * | xcbc_desc_ctx
 * +------------------------
 * | odds (block size)
 * +------------------------
 * | prev (block size)
 * +------------------------
 */
struct xcbc_desc_ctx {
	unsigned int len;
	u8 ctx[];
};

static int crypto_xcbc_digest_setkey(struct crypto_shash *parent,
				     const u8 *inkey, unsigned int keylen)
{
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct xcbc_tfm_ctx *ctx = crypto_shash_ctx(parent);
	int bs = crypto_shash_blocksize(parent);
	u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
	int err;
	u8 key1[bs];

	err = crypto_cipher_setkey(ctx->child, inkey, keylen);
	if (err)
		return err;

	/* derive K2 and K3 into consts, and K1 into key1 */
	crypto_cipher_encrypt_one(ctx->child, consts, (u8 *)ks + bs);
	crypto_cipher_encrypt_one(ctx->child, consts + bs, (u8 *)ks + bs * 2);
	crypto_cipher_encrypt_one(ctx->child, key1, (u8 *)ks);

	/* the MAC itself is computed with the cipher keyed by K1 */
	return crypto_cipher_setkey(ctx->child, key1, bs);
}

static int crypto_xcbc_digest_init(struct shash_desc *pdesc)
{
	unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm);
	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_blocksize(pdesc->tfm);
	u8 *prev = PTR_ALIGN(&ctx->ctx[0], alignmask + 1) + bs;

	ctx->len = 0;
	memset(prev, 0, bs);

	return 0;
}

static int crypto_xcbc_digest_update(struct shash_desc *pdesc, const u8 *p,
				     unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
	u8 *prev = odds + bs;

	/* if the buffered and new data together fit in one block, just buffer */
	if ((ctx->len + len) <= bs) {
		memcpy(odds + ctx->len, p, len);
		ctx->len += len;
		return 0;
	}

	/* complete the partial block in odds with new data and encrypt it */
	memcpy(odds + ctx->len, p, bs - ctx->len);
	len -= bs - ctx->len;
	p += bs - ctx->len;

	crypto_xor(prev, odds, bs);
	crypto_cipher_encrypt_one(tfm, prev, prev);

	/* the buffered block has been consumed */
	ctx->len = 0;

	/* encrypt the remaining data, always keeping the last block back */
	while (len > bs) {
		crypto_xor(prev, p, bs);
		crypto_cipher_encrypt_one(tfm, prev, prev);
		p += bs;
		len -= bs;
	}

	/* buffer the remainder (at most one block) for the next update/final */
	if (len) {
		memcpy(odds, p, len);
		ctx->len = len;
	}

	return 0;
}

static int crypto_xcbc_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(parent);
	struct xcbc_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct xcbc_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_blocksize(parent);
	u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1);
	u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1);
	u8 *prev = odds + bs;
	unsigned int offset = 0;

	/*
	 * A partial final block is padded with 0x80 followed by zeroes and
	 * mixed with K3 (consts + bs); a complete final block is mixed with
	 * K2 (consts).
	 */
	if (ctx->len != bs) {
		unsigned int rlen;
		u8 *p = odds + ctx->len;

		*p = 0x80;
		p++;

		rlen = bs - ctx->len - 1;
		if (rlen)
			memset(p, 0, rlen);

		offset += bs;
	}

	crypto_xor(prev, odds, bs);
	crypto_xor(prev, consts + offset, bs);

	crypto_cipher_encrypt_one(tfm, out, prev);

	return 0;
}

static int xcbc_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void xcbc_exit_tfm(struct crypto_tfm *tfm)
{
	struct xcbc_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static int xcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_alg *alg;
	unsigned long alignmask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
	if (err)
		return err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	/* XCBC is only instantiated over ciphers with a 16-byte block size */
	switch (alg->cra_blocksize) {
	case 16:
		break;
	default:
		goto out_put_alg;
	}

	inst = shash_alloc_instance("xcbc", alg);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	err = crypto_init_spawn(shash_instance_ctx(inst), alg,
				shash_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	alignmask = alg->cra_alignmask | 3;
	inst->alg.base.cra_alignmask = alignmask;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = ALIGN(sizeof(struct xcbc_desc_ctx),
				   crypto_tfm_ctx_alignment()) +
			     (alignmask &
			      ~(crypto_tfm_ctx_alignment() - 1)) +
			     alg->cra_blocksize * 2;

	inst->alg.base.cra_ctxsize = ALIGN(sizeof(struct xcbc_tfm_ctx),
					   alignmask + 1) +
				     alg->cra_blocksize * 2;
	inst->alg.base.cra_init = xcbc_init_tfm;
	inst->alg.base.cra_exit = xcbc_exit_tfm;

	inst->alg.init = crypto_xcbc_digest_init;
	inst->alg.update = crypto_xcbc_digest_update;
	inst->alg.final = crypto_xcbc_digest_final;
	inst->alg.setkey = crypto_xcbc_digest_setkey;

	err = shash_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		shash_free_instance(shash_crypto_instance(inst));
	}

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static struct crypto_template crypto_xcbc_tmpl = {
	.name = "xcbc",
	.create = xcbc_create,
	.free = shash_free_instance,
	.module = THIS_MODULE,
};

static int __init crypto_xcbc_module_init(void)
{
	return crypto_register_template(&crypto_xcbc_tmpl);
}

static void __exit crypto_xcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_xcbc_tmpl);
}

module_init(crypto_xcbc_module_init);
module_exit(crypto_xcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCBC keyed hash algorithm");
MODULE_ALIAS_CRYPTO("xcbc");
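
/*
 * Example usage (illustrative only, not compiled as part of this module):
 * a minimal sketch of how a kernel caller could compute an XCBC-AES MAC
 * through the shash API once this template is registered.  The shash
 * interfaces crypto_alloc_shash(), crypto_shash_setkey(),
 * SHASH_DESC_ON_STACK() and crypto_shash_digest() are the generic kernel
 * crypto API; the wrapper function xcbc_mac_example() is hypothetical.
 *
 *	static int xcbc_mac_example(const u8 *key, unsigned int keylen,
 *				    const u8 *data, unsigned int len,
 *				    u8 mac[16])
 *	{
 *		struct crypto_shash *tfm;
 *		int err;
 *
 *		tfm = crypto_alloc_shash("xcbc(aes)", 0, 0);
 *		if (IS_ERR(tfm))
 *			return PTR_ERR(tfm);
 *
 *		err = crypto_shash_setkey(tfm, key, keylen);
 *		if (!err) {
 *			SHASH_DESC_ON_STACK(desc, tfm);
 *
 *			desc->tfm = tfm;
 *			err = crypto_shash_digest(desc, data, len, mac);
 *		}
 *
 *		crypto_free_shash(tfm);
 *		return err;
 *	}
 */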