// SPDX-License-Identifier: GPL-2.0
/*
 * ESSIV skcipher and aead template for block encryption
 *
 * This template encapsulates the ESSIV IV generation algorithm used by
 * dm-crypt and fscrypt: it converts the initial vector for the skcipher
 * used for block encryption by encrypting it, using a hash of the
 * skcipher key as the encryption key. Usually, the input IV is a 64-bit
 * sector number in LE representation zero-padded to the size of the IV,
 * but this is not assumed by this driver.
 *
 * The typical use of this template is to instantiate the skcipher
 * 'essiv(cbc(aes),sha256)', which is the only instantiation used by
 * fscrypt, and the most relevant one for dm-crypt. However, dm-crypt
 * also permits ESSIV to be used in combination with the authenc template,
 * e.g., 'essiv(authenc(hmac(sha256),cbc(aes)),sha256)', in which case
 * we need to instantiate an aead that accepts the same special key format
 * as the authenc template, and deals with the way the encrypted IV is
 * embedded into the AAD area of the aead request. This means the AEAD
 * flavor produced by this template is tightly coupled to the way dm-crypt
 * happens to use it.
 *
 * Copyright (c) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * Heavily based on:
 * adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */
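
/*
 * As an illustration (a sketch, not a normative definition), the
 * transformation implemented by this template is roughly:
 *
 *	salt = Hash(K)			// e.g. sha256(K)
 *	IV'  = E(salt, IV)		// e.g. AES-encrypt the IV with salt
 *	out  = skcipher(K, IV', in)	// e.g. cbc(aes) keyed with K
 *
 * where K is the key supplied by the user and IV is the sector-derived
 * initial vector described above.
 */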

#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#include "internal.h"

struct essiv_instance_ctx {
	union {
		struct crypto_skcipher_spawn	skcipher_spawn;
		struct crypto_aead_spawn	aead_spawn;
	} u;
	char	essiv_cipher_name[CRYPTO_MAX_ALG_NAME];
	char	shash_driver_name[CRYPTO_MAX_ALG_NAME];
};

struct essiv_tfm_ctx {
	union {
		struct crypto_skcipher	*skcipher;
		struct crypto_aead	*aead;
	} u;
	struct crypto_cipher		*essiv_cipher;
	struct crypto_shash		*hash;
	int				ivoffset;	/* IV copy offset in the aead request ctx */
};

struct essiv_aead_request_ctx {
	struct scatterlist		sg[4];
	u8				*assoc;		/* optional linearized copy of the AAD */
	struct aead_request		aead_req;
};

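/*
 * Key handling for the skcipher flavor: the inner skcipher receives the
 * key exactly as provided by the caller, while the ESSIV cipher is keyed
 * with a digest of that key, i.e., (informally) essiv_key = Hash(key).
 */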
static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->u.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
	if (err)
		return err;

	err = crypto_shash_tfm_digest(tctx->hash, key, keylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

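/*
 * Key handling for the aead flavor: the key arrives as an authenc() key
 * blob carrying both the authentication and the encryption key. The
 * inner authenc aead receives the blob unchanged; the ESSIV cipher key
 * is the digest of the encryption key followed by the authentication key.
 */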
static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	struct crypto_authenc_keys keys;
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(tctx->u.aead, key, keylen);
	if (err)
		return err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		return -EINVAL;

	desc->tfm = tctx->hash;
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
	      crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
						    CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

static int essiv_aead_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(tctx->u.aead, authsize);
}

static void essiv_skcipher_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	skcipher_request_complete(req, err);
}

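/*
 * Encrypt the caller's IV in place using the ESSIV cipher, then hand the
 * request on to the inner skcipher with the converted IV.
 */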
static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	skcipher_request_set_tfm(subreq, tctx->u.skcipher);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      essiv_skcipher_done, req);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int essiv_skcipher_encrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, true);
}

static int essiv_skcipher_decrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, false);
}

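/*
 * Completion handler for async inner aead requests: the temporary copy
 * of the AAD (if one was made) may only be freed on final completion.
 * A call with -EINPROGRESS merely signals that a backlogged request has
 * started processing, so the buffer must stay alive until the real
 * completion arrives.
 */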
static void essiv_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);

	if (err == -EINPROGRESS)
		goto out;

	kfree(rctx->assoc);

out:
	aead_request_complete(req, err);
}

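/*
 * dm-crypt presents the AAD as the sector number followed by the IV,
 * i.e., req->assoclen covers both, with the IV occupying the final
 * ivsize bytes. The converted IV therefore has to be patched into that
 * final slot before the inner aead gets to process the request.
 */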
static int essiv_aead_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->aead_req;
	struct scatterlist *src = req->src;
	int err;

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	/*
	 * dm-crypt embeds the sector number and the IV in the AAD region, so
	 * we have to copy the converted IV into the right scatterlist before
	 * we pass it on.
	 */
	rctx->assoc = NULL;
	if (req->src == req->dst || !enc) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->assoclen - crypto_aead_ivsize(tfm),
					 crypto_aead_ivsize(tfm), 1);
	} else {
		u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
		int ivsize = crypto_aead_ivsize(tfm);
		int ssize = req->assoclen - ivsize;
		struct scatterlist *sg;
		int nents;

		if (ssize < 0)
			return -EINVAL;

		nents = sg_nents_for_len(req->src, ssize);
		if (nents < 0)
			return -EINVAL;

		memcpy(iv, req->iv, ivsize);
		sg_init_table(rctx->sg, 4);

		if (unlikely(nents > 1)) {
			/*
			 * This is a case that rarely occurs in practice, but
			 * for correctness, we have to deal with it nonetheless.
			 */
			rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
			if (!rctx->assoc)
				return -ENOMEM;

			scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
						 ssize, 0);
			sg_set_buf(rctx->sg, rctx->assoc, ssize);
		} else {
			sg_set_page(rctx->sg, sg_page(req->src), ssize,
				    req->src->offset);
		}

		sg_set_buf(rctx->sg + 1, iv, ivsize);
		sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
		if (sg != rctx->sg + 2)
			sg_chain(rctx->sg, 3, sg);

		src = rctx->sg;
	}

	aead_request_set_tfm(subreq, tctx->u.aead);
	aead_request_set_ad(subreq, req->assoclen);
	aead_request_set_callback(subreq, aead_request_flags(req),
				  essiv_aead_done, req);
	aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);

	err = enc ? crypto_aead_encrypt(subreq) :
		    crypto_aead_decrypt(subreq);

	/* -EBUSY with MAY_BACKLOG set also completes via the callback */
	if (rctx->assoc && err != -EINPROGRESS && err != -EBUSY)
		kfree(rctx->assoc);
	return err;
}

static int essiv_aead_encrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, true);
}

static int essiv_aead_decrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, false);
}

static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
			  struct essiv_tfm_ctx *tctx)
{
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int err;

	essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
	if (IS_ERR(essiv_cipher))
		return PTR_ERR(essiv_cipher);

	hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_essiv_cipher;
	}

	tctx->essiv_cipher = essiv_cipher;
	tctx->hash = hash;

	return 0;

err_free_essiv_cipher:
	crypto_free_cipher(essiv_cipher);
	return err;
}

static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *skcipher;
	int err;

	skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
	if (IS_ERR(skcipher))
		return PTR_ERR(skcipher);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				         crypto_skcipher_reqsize(skcipher));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_skcipher(skcipher);
		return err;
	}

	tctx->u.skcipher = skcipher;
	return 0;
}

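/*
 * The aead request context is laid out as
 *
 *	struct essiv_aead_request_ctx | inner aead request + ctx | IV copy
 *
 * with tctx->ivoffset recording where the IV copy starts.
 */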
static int essiv_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned int subreq_size;
	int err;

	BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
		     sizeof(struct essiv_aead_request_ctx));

	aead = crypto_spawn_aead(&ictx->u.aead_spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	subreq_size = sizeof_field(struct essiv_aead_request_ctx, aead_req) +
		      crypto_aead_reqsize(aead);

	tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
			 subreq_size;
	crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_aead(aead);
		return err;
	}

	tctx->u.aead = aead;
	return 0;
}

static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->u.skcipher);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	crypto_free_aead(tctx->u.aead);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
{
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	kfree(inst);
}

static void essiv_aead_free_instance(struct aead_instance *inst)
{
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);

	crypto_drop_aead(&ictx->u.aead_spawn);
	kfree(inst);
}

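/*
 * Extract the name of the raw block cipher from the inner algorithm's
 * cra_name, e.g., "aes" from either "cbc(aes)" or
 * "authenc(hmac(sha256),cbc(aes))".
 */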
static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
{
	const char *p, *q;
	int len;

	/* find the last opening paren */
	p = strrchr(cra_name, '(');
	if (!p++)
		return false;

	/* find the first closing paren in the tail of the string */
	q = strchr(p, ')');
	if (!q)
		return false;

	len = q - p;
	if (len >= CRYPTO_MAX_ALG_NAME)
		return false;

	memcpy(essiv_cipher_name, p, len);
	essiv_cipher_name[len] = '\0';
	return true;
}

static bool essiv_supported_algorithms(const char *essiv_cipher_name,
				       struct shash_alg *hash_alg,
				       int ivsize)
{
	struct crypto_alg *alg;
	bool ret = false;

	alg = crypto_alg_mod_lookup(essiv_cipher_name,
				    CRYPTO_ALG_TYPE_CIPHER,
				    CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return false;

	if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
	    hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
		goto out;

	if (ivsize != alg->cra_blocksize)
		goto out;

	if (crypto_shash_alg_needs_key(hash_alg))
		goto out;

	ret = true;

out:
	crypto_mod_put(alg);
	return ret;
}

static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	const char *inner_cipher_name;
	const char *shash_name;
	struct skcipher_instance *skcipher_inst = NULL;
	struct aead_instance *aead_inst = NULL;
	struct crypto_instance *inst;
	struct crypto_alg *base, *block_base;
	struct essiv_instance_ctx *ictx;
	struct skcipher_alg *skcipher_alg = NULL;
	struct aead_alg *aead_alg = NULL;
	struct crypto_alg *_hash_alg;
	struct shash_alg *hash_alg;
	int ivsize;
	u32 type;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	inner_cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(inner_cipher_name))
		return PTR_ERR(inner_cipher_name);

	shash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(shash_name))
		return PTR_ERR(shash_name);

	type = algt->type & algt->mask;
	mask = crypto_algt_inherited_mask(algt);

	switch (type) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
					sizeof(*ictx), GFP_KERNEL);
		if (!skcipher_inst)
			return -ENOMEM;
		inst = skcipher_crypto_instance(skcipher_inst);
		base = &skcipher_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* Symmetric cipher, e.g., "cbc(aes)" */
		err = crypto_grab_skcipher(&ictx->u.skcipher_spawn, inst,
					   inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		skcipher_alg = crypto_spawn_skcipher_alg(&ictx->u.skcipher_spawn);
		block_base = &skcipher_alg->base;
		ivsize = crypto_skcipher_alg_ivsize(skcipher_alg);
		break;

	case CRYPTO_ALG_TYPE_AEAD:
		aead_inst = kzalloc(sizeof(*aead_inst) +
				    sizeof(*ictx), GFP_KERNEL);
		if (!aead_inst)
			return -ENOMEM;
		inst = aead_crypto_instance(aead_inst);
		base = &aead_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* AEAD cipher, e.g., "authenc(hmac(sha256),cbc(aes))" */
		err = crypto_grab_aead(&ictx->u.aead_spawn, inst,
				       inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
		block_base = &aead_alg->base;
		if (!strstarts(block_base->cra_name, "authenc(")) {
			pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
			err = -EINVAL;
			goto out_drop_skcipher;
		}
		ivsize = aead_alg->ivsize;
		break;

	default:
		return -EINVAL;
	}

	if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
		pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
		err = -EINVAL;
		goto out_drop_skcipher;
	}

	/* Synchronous hash, e.g., "sha256" */
	_hash_alg = crypto_alg_mod_lookup(shash_name,
					  CRYPTO_ALG_TYPE_SHASH,
					  CRYPTO_ALG_TYPE_MASK | mask);
	if (IS_ERR(_hash_alg)) {
		err = PTR_ERR(_hash_alg);
		goto out_drop_skcipher;
	}
	hash_alg = __crypto_shash_alg(_hash_alg);

	/* Check the set of algorithms */
	if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
					ivsize)) {
		pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
			block_base->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto out_free_hash;
	}

	/* record the driver name so we can instantiate this exact algo later */
	strlcpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name,
		CRYPTO_MAX_ALG_NAME);

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_name,
		     hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;
	if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;

	/*
	 * hash_alg wasn't gotten via crypto_grab*(), so we need to inherit its
	 * flags manually.
	 */
	base->cra_flags        |= (hash_alg->base.cra_flags &
				   CRYPTO_ALG_INHERITED_FLAGS);
	base->cra_blocksize	= block_base->cra_blocksize;
	base->cra_ctxsize	= sizeof(struct essiv_tfm_ctx);
	base->cra_alignmask	= block_base->cra_alignmask;
	base->cra_priority	= block_base->cra_priority;

	if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
		skcipher_inst->alg.setkey	= essiv_skcipher_setkey;
		skcipher_inst->alg.encrypt	= essiv_skcipher_encrypt;
		skcipher_inst->alg.decrypt	= essiv_skcipher_decrypt;
		skcipher_inst->alg.init		= essiv_skcipher_init_tfm;
		skcipher_inst->alg.exit		= essiv_skcipher_exit_tfm;

		skcipher_inst->alg.min_keysize	= crypto_skcipher_alg_min_keysize(skcipher_alg);
		skcipher_inst->alg.max_keysize	= crypto_skcipher_alg_max_keysize(skcipher_alg);
		skcipher_inst->alg.ivsize	= ivsize;
		skcipher_inst->alg.chunksize	= crypto_skcipher_alg_chunksize(skcipher_alg);
		skcipher_inst->alg.walksize	= crypto_skcipher_alg_walksize(skcipher_alg);

		skcipher_inst->free		= essiv_skcipher_free_instance;

		err = skcipher_register_instance(tmpl, skcipher_inst);
	} else {
		aead_inst->alg.setkey		= essiv_aead_setkey;
		aead_inst->alg.setauthsize	= essiv_aead_setauthsize;
		aead_inst->alg.encrypt		= essiv_aead_encrypt;
		aead_inst->alg.decrypt		= essiv_aead_decrypt;
		aead_inst->alg.init		= essiv_aead_init_tfm;
		aead_inst->alg.exit		= essiv_aead_exit_tfm;

		aead_inst->alg.ivsize		= ivsize;
		aead_inst->alg.maxauthsize	= crypto_aead_alg_maxauthsize(aead_alg);
		aead_inst->alg.chunksize	= crypto_aead_alg_chunksize(aead_alg);

		aead_inst->free			= essiv_aead_free_instance;

		err = aead_register_instance(tmpl, aead_inst);
	}

	if (err)
		goto out_free_hash;

	crypto_mod_put(_hash_alg);
	return 0;

out_free_hash:
	crypto_mod_put(_hash_alg);
out_drop_skcipher:
	if (type == CRYPTO_ALG_TYPE_SKCIPHER)
		crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	else
		crypto_drop_aead(&ictx->u.aead_spawn);
out_free_inst:
	kfree(skcipher_inst);
	kfree(aead_inst);
	return err;
}

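/*
 * Example (a sketch): users typically reach this template by allocating
 * crypto_alloc_skcipher("essiv(cbc(aes),sha256)", 0, 0), which is the
 * instantiation fscrypt uses and the common dm-crypt configuration.
 */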
/* essiv(cipher_name, shash_name) */
static struct crypto_template essiv_tmpl = {
	.name	= "essiv",
	.create	= essiv_create,
	.module	= THIS_MODULE,
};

static int __init essiv_module_init(void)
{
	return crypto_register_template(&essiv_tmpl);
}

static void __exit essiv_module_exit(void)
{
	crypto_unregister_template(&essiv_tmpl);
}

subsys_initcall(essiv_module_init);
module_exit(essiv_module_exit);

MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("essiv");