1 /* 2 * Symmetric key cipher operations. 3 * 4 * Generic encrypt/decrypt wrapper for ciphers, handles operations across 5 * multiple page boundaries by using temporary blocks. In user context, 6 * the kernel is given a chance to schedule us once per page. 7 * 8 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au> 9 * 10 * This program is free software; you can redistribute it and/or modify it 11 * under the terms of the GNU General Public License as published by the Free 12 * Software Foundation; either version 2 of the License, or (at your option) 13 * any later version. 14 * 15 */ 16 17 #include <crypto/internal/skcipher.h> 18 #include <linux/bug.h> 19 #include <linux/cryptouser.h> 20 #include <linux/module.h> 21 #include <linux/rtnetlink.h> 22 #include <linux/seq_file.h> 23 #include <net/netlink.h> 24 25 #include "internal.h" 26 27 static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg) 28 { 29 if (alg->cra_type == &crypto_blkcipher_type) 30 return sizeof(struct crypto_blkcipher *); 31 32 if (alg->cra_type == &crypto_ablkcipher_type || 33 alg->cra_type == &crypto_givcipher_type) 34 return sizeof(struct crypto_ablkcipher *); 35 36 return crypto_alg_extsize(alg); 37 } 38 39 static int skcipher_setkey_blkcipher(struct crypto_skcipher *tfm, 40 const u8 *key, unsigned int keylen) 41 { 42 struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm); 43 struct crypto_blkcipher *blkcipher = *ctx; 44 int err; 45 46 crypto_blkcipher_clear_flags(blkcipher, ~0); 47 crypto_blkcipher_set_flags(blkcipher, crypto_skcipher_get_flags(tfm) & 48 CRYPTO_TFM_REQ_MASK); 49 err = crypto_blkcipher_setkey(blkcipher, key, keylen); 50 crypto_skcipher_set_flags(tfm, crypto_blkcipher_get_flags(blkcipher) & 51 CRYPTO_TFM_RES_MASK); 52 53 return err; 54 } 55 56 static int skcipher_crypt_blkcipher(struct skcipher_request *req, 57 int (*crypt)(struct blkcipher_desc *, 58 struct scatterlist *, 59 struct scatterlist *, 60 unsigned int)) 61 { 62 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 63 struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm); 64 struct blkcipher_desc desc = { 65 .tfm = *ctx, 66 .info = req->iv, 67 .flags = req->base.flags, 68 }; 69 70 71 return crypt(&desc, req->dst, req->src, req->cryptlen); 72 } 73 74 static int skcipher_encrypt_blkcipher(struct skcipher_request *req) 75 { 76 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 77 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 78 struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher; 79 80 return skcipher_crypt_blkcipher(req, alg->encrypt); 81 } 82 83 static int skcipher_decrypt_blkcipher(struct skcipher_request *req) 84 { 85 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 86 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 87 struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher; 88 89 return skcipher_crypt_blkcipher(req, alg->decrypt); 90 } 91 92 static void crypto_exit_skcipher_ops_blkcipher(struct crypto_tfm *tfm) 93 { 94 struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm); 95 96 crypto_free_blkcipher(*ctx); 97 } 98 99 static int crypto_init_skcipher_ops_blkcipher(struct crypto_tfm *tfm) 100 { 101 struct crypto_alg *calg = tfm->__crt_alg; 102 struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm); 103 struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm); 104 struct crypto_blkcipher *blkcipher; 105 struct crypto_tfm *btfm; 106 107 if (!crypto_mod_get(calg)) 108 return -EAGAIN; 109 110 btfm = __crypto_alloc_tfm(calg, CRYPTO_ALG_TYPE_BLKCIPHER, 111 CRYPTO_ALG_TYPE_MASK); 112 if (IS_ERR(btfm)) { 113 crypto_mod_put(calg); 114 return PTR_ERR(btfm); 115 } 116 117 blkcipher = __crypto_blkcipher_cast(btfm); 118 *ctx = blkcipher; 119 tfm->exit = crypto_exit_skcipher_ops_blkcipher; 120 121 skcipher->setkey = skcipher_setkey_blkcipher; 122 skcipher->encrypt = skcipher_encrypt_blkcipher; 123 skcipher->decrypt = skcipher_decrypt_blkcipher; 124 125 skcipher->ivsize = crypto_blkcipher_ivsize(blkcipher); 126 skcipher->keysize = calg->cra_blkcipher.max_keysize; 127 128 return 0; 129 } 130 131 static int skcipher_setkey_ablkcipher(struct crypto_skcipher *tfm, 132 const u8 *key, unsigned int keylen) 133 { 134 struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm); 135 struct crypto_ablkcipher *ablkcipher = *ctx; 136 int err; 137 138 crypto_ablkcipher_clear_flags(ablkcipher, ~0); 139 crypto_ablkcipher_set_flags(ablkcipher, 140 crypto_skcipher_get_flags(tfm) & 141 CRYPTO_TFM_REQ_MASK); 142 err = crypto_ablkcipher_setkey(ablkcipher, key, keylen); 143 crypto_skcipher_set_flags(tfm, 144 crypto_ablkcipher_get_flags(ablkcipher) & 145 CRYPTO_TFM_RES_MASK); 146 147 return err; 148 } 149 150 static int skcipher_crypt_ablkcipher(struct skcipher_request *req, 151 int (*crypt)(struct ablkcipher_request *)) 152 { 153 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 154 struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm); 155 struct ablkcipher_request *subreq = skcipher_request_ctx(req); 156 157 ablkcipher_request_set_tfm(subreq, *ctx); 158 ablkcipher_request_set_callback(subreq, skcipher_request_flags(req), 159 req->base.complete, req->base.data); 160 ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, 161 req->iv); 162 163 return crypt(subreq); 164 } 165 166 static int skcipher_encrypt_ablkcipher(struct skcipher_request *req) 167 { 168 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 169 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 170 struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher; 171 172 return skcipher_crypt_ablkcipher(req, alg->encrypt); 173 } 174 175 static int skcipher_decrypt_ablkcipher(struct skcipher_request *req) 176 { 177 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 178 struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher); 179 struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher; 180 181 return skcipher_crypt_ablkcipher(req, alg->decrypt); 182 } 183 184 static void crypto_exit_skcipher_ops_ablkcipher(struct crypto_tfm *tfm) 185 { 186 struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm); 187 188 crypto_free_ablkcipher(*ctx); 189 } 190 191 static int crypto_init_skcipher_ops_ablkcipher(struct crypto_tfm *tfm) 192 { 193 struct crypto_alg *calg = tfm->__crt_alg; 194 struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm); 195 struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm); 196 struct crypto_ablkcipher *ablkcipher; 197 struct crypto_tfm *abtfm; 198 199 if (!crypto_mod_get(calg)) 200 return -EAGAIN; 201 202 abtfm = __crypto_alloc_tfm(calg, 0, 0); 203 if (IS_ERR(abtfm)) { 204 crypto_mod_put(calg); 205 return PTR_ERR(abtfm); 206 } 207 208 ablkcipher = __crypto_ablkcipher_cast(abtfm); 209 *ctx = ablkcipher; 210 tfm->exit = crypto_exit_skcipher_ops_ablkcipher; 211 212 skcipher->setkey = skcipher_setkey_ablkcipher; 213 skcipher->encrypt = skcipher_encrypt_ablkcipher; 214 skcipher->decrypt = skcipher_decrypt_ablkcipher; 215 216 skcipher->ivsize = crypto_ablkcipher_ivsize(ablkcipher); 217 skcipher->reqsize = crypto_ablkcipher_reqsize(ablkcipher) + 218 sizeof(struct ablkcipher_request); 219 skcipher->keysize = calg->cra_ablkcipher.max_keysize; 220 221 return 0; 222 } 223 224 static void crypto_skcipher_exit_tfm(struct crypto_tfm *tfm) 225 { 226 struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm); 227 struct skcipher_alg *alg = crypto_skcipher_alg(skcipher); 228 229 alg->exit(skcipher); 230 } 231 232 static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm) 233 { 234 struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm); 235 struct skcipher_alg *alg = crypto_skcipher_alg(skcipher); 236 237 if (tfm->__crt_alg->cra_type == &crypto_blkcipher_type) 238 return crypto_init_skcipher_ops_blkcipher(tfm); 239 240 if (tfm->__crt_alg->cra_type == &crypto_ablkcipher_type || 241 tfm->__crt_alg->cra_type == &crypto_givcipher_type) 242 return crypto_init_skcipher_ops_ablkcipher(tfm); 243 244 skcipher->setkey = alg->setkey; 245 skcipher->encrypt = alg->encrypt; 246 skcipher->decrypt = alg->decrypt; 247 skcipher->ivsize = alg->ivsize; 248 skcipher->keysize = alg->max_keysize; 249 250 if (alg->exit) 251 skcipher->base.exit = crypto_skcipher_exit_tfm; 252 253 if (alg->init) 254 return alg->init(skcipher); 255 256 return 0; 257 } 258 259 static void crypto_skcipher_free_instance(struct crypto_instance *inst) 260 { 261 struct skcipher_instance *skcipher = 262 container_of(inst, struct skcipher_instance, s.base); 263 264 skcipher->free(skcipher); 265 } 266 267 static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg) 268 __attribute__ ((unused)); 269 static void crypto_skcipher_show(struct seq_file *m, struct crypto_alg *alg) 270 { 271 struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg, 272 base); 273 274 seq_printf(m, "type : skcipher\n"); 275 seq_printf(m, "async : %s\n", 276 alg->cra_flags & CRYPTO_ALG_ASYNC ? "yes" : "no"); 277 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize); 278 seq_printf(m, "min keysize : %u\n", skcipher->min_keysize); 279 seq_printf(m, "max keysize : %u\n", skcipher->max_keysize); 280 seq_printf(m, "ivsize : %u\n", skcipher->ivsize); 281 seq_printf(m, "chunksize : %u\n", skcipher->chunksize); 282 } 283 284 #ifdef CONFIG_NET 285 static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg) 286 { 287 struct crypto_report_blkcipher rblkcipher; 288 struct skcipher_alg *skcipher = container_of(alg, struct skcipher_alg, 289 base); 290 291 strncpy(rblkcipher.type, "skcipher", sizeof(rblkcipher.type)); 292 strncpy(rblkcipher.geniv, "<none>", sizeof(rblkcipher.geniv)); 293 294 rblkcipher.blocksize = alg->cra_blocksize; 295 rblkcipher.min_keysize = skcipher->min_keysize; 296 rblkcipher.max_keysize = skcipher->max_keysize; 297 rblkcipher.ivsize = skcipher->ivsize; 298 299 if (nla_put(skb, CRYPTOCFGA_REPORT_BLKCIPHER, 300 sizeof(struct crypto_report_blkcipher), &rblkcipher)) 301 goto nla_put_failure; 302 return 0; 303 304 nla_put_failure: 305 return -EMSGSIZE; 306 } 307 #else 308 static int crypto_skcipher_report(struct sk_buff *skb, struct crypto_alg *alg) 309 { 310 return -ENOSYS; 311 } 312 #endif 313 314 static const struct crypto_type crypto_skcipher_type2 = { 315 .extsize = crypto_skcipher_extsize, 316 .init_tfm = crypto_skcipher_init_tfm, 317 .free = crypto_skcipher_free_instance, 318 #ifdef CONFIG_PROC_FS 319 .show = crypto_skcipher_show, 320 #endif 321 .report = crypto_skcipher_report, 322 .maskclear = ~CRYPTO_ALG_TYPE_MASK, 323 .maskset = CRYPTO_ALG_TYPE_BLKCIPHER_MASK, 324 .type = CRYPTO_ALG_TYPE_SKCIPHER, 325 .tfmsize = offsetof(struct crypto_skcipher, base), 326 }; 327 328 int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, 329 const char *name, u32 type, u32 mask) 330 { 331 spawn->base.frontend = &crypto_skcipher_type2; 332 return crypto_grab_spawn(&spawn->base, name, type, mask); 333 } 334 EXPORT_SYMBOL_GPL(crypto_grab_skcipher); 335 336 struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name, 337 u32 type, u32 mask) 338 { 339 return crypto_alloc_tfm(alg_name, &crypto_skcipher_type2, type, mask); 340 } 341 EXPORT_SYMBOL_GPL(crypto_alloc_skcipher); 342 343 int crypto_has_skcipher2(const char *alg_name, u32 type, u32 mask) 344 { 345 return crypto_type_has_alg(alg_name, &crypto_skcipher_type2, 346 type, mask); 347 } 348 EXPORT_SYMBOL_GPL(crypto_has_skcipher2); 349 350 static int skcipher_prepare_alg(struct skcipher_alg *alg) 351 { 352 struct crypto_alg *base = &alg->base; 353 354 if (alg->ivsize > PAGE_SIZE / 8 || alg->chunksize > PAGE_SIZE / 8) 355 return -EINVAL; 356 357 if (!alg->chunksize) 358 alg->chunksize = base->cra_blocksize; 359 360 base->cra_type = &crypto_skcipher_type2; 361 base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK; 362 base->cra_flags |= CRYPTO_ALG_TYPE_SKCIPHER; 363 364 return 0; 365 } 366 367 int crypto_register_skcipher(struct skcipher_alg *alg) 368 { 369 struct crypto_alg *base = &alg->base; 370 int err; 371 372 err = skcipher_prepare_alg(alg); 373 if (err) 374 return err; 375 376 return crypto_register_alg(base); 377 } 378 EXPORT_SYMBOL_GPL(crypto_register_skcipher); 379 380 void crypto_unregister_skcipher(struct skcipher_alg *alg) 381 { 382 crypto_unregister_alg(&alg->base); 383 } 384 EXPORT_SYMBOL_GPL(crypto_unregister_skcipher); 385 386 int crypto_register_skciphers(struct skcipher_alg *algs, int count) 387 { 388 int i, ret; 389 390 for (i = 0; i < count; i++) { 391 ret = crypto_register_skcipher(&algs[i]); 392 if (ret) 393 goto err; 394 } 395 396 return 0; 397 398 err: 399 for (--i; i >= 0; --i) 400 crypto_unregister_skcipher(&algs[i]); 401 402 return ret; 403 } 404 EXPORT_SYMBOL_GPL(crypto_register_skciphers); 405 406 void crypto_unregister_skciphers(struct skcipher_alg *algs, int count) 407 { 408 int i; 409 410 for (i = count - 1; i >= 0; --i) 411 crypto_unregister_skcipher(&algs[i]); 412 } 413 EXPORT_SYMBOL_GPL(crypto_unregister_skciphers); 414 415 int skcipher_register_instance(struct crypto_template *tmpl, 416 struct skcipher_instance *inst) 417 { 418 int err; 419 420 err = skcipher_prepare_alg(&inst->alg); 421 if (err) 422 return err; 423 424 return crypto_register_instance(tmpl, skcipher_crypto_instance(inst)); 425 } 426 EXPORT_SYMBOL_GPL(skcipher_register_instance); 427 428 MODULE_LICENSE("GPL"); 429 MODULE_DESCRIPTION("Symmetric key cipher type"); 430