/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
        atomic_inc(&alg->cra_refcnt);
        return alg;
}

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}
EXPORT_SYMBOL_GPL(__crypto_alg_lookup);

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}

static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        atomic_set(&larval->alg.cra_refcnt, 2);
        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg)
                kfree(larval);

        return alg;
}
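
/*
 * A larval is a placeholder that holds a slot in crypto_alg_list while
 * the real ("adult") algorithm is still being loaded or instantiated.
 * Note the initial refcount of 2 in crypto_larval_alloc() above: one
 * reference belongs to the list entry, the other to the caller.
 *
 * Illustrative sketch of the lookup rules in __crypto_alg_lookup():
 * a name such as "sha1-generic" matches cra_driver_name exactly and
 * wins immediately, while "sha1" matches cra_name and the entry with
 * the highest cra_priority wins.  The helper below is hypothetical and
 * compiled out; it is not part of this file's API.
 */
#if 0
static void example_lookup_rules(void)
{
        struct crypto_alg *alg;

        /* The read side of crypto_alg_sem protects the list walk. */
        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup("sha1", 0, 0); /* any type, best priority */
        up_read(&crypto_alg_sem);

        if (alg)
                crypto_mod_put(alg);    /* lookup took module + alg refs */
}
#endif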

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        wait_for_completion_interruptible_timeout(&larval->completion,
                                                  60 * HZ);
        alg = larval->adult;
        if (alg) {
                if (!crypto_mod_get(alg))
                        alg = ERR_PTR(-EAGAIN);
        } else
                alg = ERR_PTR(-ENOENT);
        crypto_mod_put(&larval->alg);

        return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        up_read(&crypto_alg_sem);

        return alg;
}

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        type &= mask;

        alg = try_then_request_module(crypto_alg_lookup(name, type, mask),
                                      name);
        if (alg)
                return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

        return crypto_larval_alloc(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
        }

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_DIGEST:
                return crypto_init_digest_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type) {
                if (type->exit)
                        type->exit(tfm);
                return;
        }

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                crypto_exit_cipher_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_DIGEST:
                crypto_exit_digest_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                crypto_exit_compress_ops(tfm);
                break;

        default:
                BUG();
        }
}
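
/*
 * crypto_ctxsize() below reserves slack so the per-tfm context can be
 * realigned to cra_alignmask + 1 bytes.  A worked example, assuming
 * crypto_tfm_ctx_alignment() returns 8: with cra_alignmask == 15,
 * 15 & ~(8 - 1) == 8 spare bytes, enough to bump a pointer that is
 * already 8-byte aligned up to the next 16-byte boundary.
 */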

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_DIGEST:
                len += crypto_digest_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (alg->cra_init && (err = alg->cra_init(tfm))) {
                if (err == -EAGAIN)
                        crypto_shoot_alg(alg);
                goto cra_init_failed;
        }

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm, which is then associated with the new transform.
 *
 * The returned transform is of an indeterminate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error, the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
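
/*
 * Illustrative sketch of the allocation API (the helper is hypothetical
 * and compiled out): allocate a transform by name, then release it with
 * crypto_free_tfm() below.  Real users should normally prefer one of the
 * type-specific wrappers such as crypto_alloc_blkcipher().
 */
#if 0
static int example_alloc(void)
{
        struct crypto_tfm *tfm;

        tfm = crypto_alloc_base("sha1", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);    /* e.g. -ENOENT or -ENOMEM */

        /* ... use the transform ... */

        crypto_free_tfm(tfm);
        return 0;
}
#endif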

/*
 * crypto_free_tfm - Free crypto transform
 * @tfm: Transform to free
 *
 * crypto_free_tfm() frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_free_tfm(struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;
        int size;

        if (unlikely(!tfm))
                return;

        alg = tfm->__crt_alg;
        size = sizeof(*tfm) + alg->cra_ctxsize;

        if (alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        memset(tfm, 0, size);
        kfree(tfm);
}
EXPORT_SYMBOL_GPL(crypto_free_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
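
/*
 * Illustrative sketch (hypothetical, compiled-out helper): probing for
 * an algorithm without keeping a reference.  crypto_has_alg() takes and
 * drops the reference internally and only reports availability.
 */
#if 0
static void example_probe(void)
{
        if (crypto_has_alg("aes", 0, 0))
                printk(KERN_INFO "aes is available\n");
}
#endif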