// SPDX-License-Identifier: GPL-2.0
/*
 * ESSIV skcipher and aead template for block encryption
 *
 * This template encapsulates the ESSIV IV generation algorithm used by
 * dm-crypt and fscrypt: it derives the IV for the skcipher used for block
 * encryption by encrypting the input IV, using a hash of the skcipher key
 * as the encryption key. Usually, the input IV is a 64-bit sector
 * number in LE representation zero-padded to the size of the IV, but this
 * is not assumed by this driver.
 *
 * The typical use of this template is to instantiate the skcipher
 * 'essiv(cbc(aes),sha256)', which is the only instantiation used by
 * fscrypt, and the most relevant one for dm-crypt. However, dm-crypt
 * also permits ESSIV to be used in combination with the authenc template,
 * e.g., 'essiv(authenc(hmac(sha256),cbc(aes)),sha256)', in which case
 * we need to instantiate an aead that accepts the same special key format
 * as the authenc template, and deals with the way the encrypted IV is
 * embedded into the AAD area of the aead request. This means the AEAD
 * flavor produced by this template is tightly coupled to the way dm-crypt
 * happens to use it.
 *
 * Copyright (c) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * Heavily based on:
 * adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */
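/*
 * For reference, a typical in-kernel user would instantiate the template
 * by name, e.g. (a minimal sketch, error handling omitted):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("essiv(cbc(aes),sha256)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *
 * after which requests are issued exactly as for any other skcipher,
 * with the sector number passed as the IV.
 */
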
#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#include "internal.h"

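/*
 * Per-instance context: a spawn for the inner skcipher or aead, plus the
 * names needed to instantiate the ESSIV cipher and hash when a transform
 * is created.
 */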
struct essiv_instance_ctx {
	union {
		struct crypto_skcipher_spawn	skcipher_spawn;
		struct crypto_aead_spawn	aead_spawn;
	} u;
	char	essiv_cipher_name[CRYPTO_MAX_ALG_NAME];
	char	shash_driver_name[CRYPTO_MAX_ALG_NAME];
};

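/*
 * Per-transform context: the inner skcipher/aead doing the actual block
 * encryption, the bare cipher used to encrypt the IV, and the shash used
 * to derive the ESSIV key. @ivoffset locates the IV copy inside the AEAD
 * request context (unused in the skcipher case).
 */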
struct essiv_tfm_ctx {
	union {
		struct crypto_skcipher	*skcipher;
		struct crypto_aead	*aead;
	} u;
	struct crypto_cipher		*essiv_cipher;
	struct crypto_shash		*hash;
	int				ivoffset;
};

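/*
 * Per-request context for the AEAD case: a scatterlist used to splice the
 * encrypted IV into the AAD, an optional bounce buffer for multi-entry
 * associated data, and the inner aead request (which must come last, as
 * the inner transform's request context and the IV are appended to it).
 */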
struct essiv_aead_request_ctx {
	struct scatterlist		sg[4];
	u8				*assoc;
	struct aead_request		aead_req;
};

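/*
 * Key both transforms: the inner skcipher gets the key as-is, while the
 * ESSIV cipher is keyed with the digest of the key, as prescribed by the
 * ESSIV construction.
 */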
static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->u.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
	if (err)
		return err;

	err = crypto_shash_tfm_digest(tctx->hash, key, keylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

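/*
 * The AEAD variant takes its key in the authenc() format. The inner aead
 * is keyed with the full blob, while the ESSIV cipher is keyed with the
 * digest of the encryption and authentication keys (in that order).
 */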
static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	struct crypto_authenc_keys keys;
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(tctx->u.aead, key, keylen);
	if (err)
		return err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		return -EINVAL;

	desc->tfm = tctx->hash;
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
	      crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
						    CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

static int essiv_aead_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(tctx->u.aead, authsize);
}

static void essiv_skcipher_done(void *data, int err)
{
	struct skcipher_request *req = data;

	skcipher_request_complete(req, err);
}

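/*
 * Encrypt the caller-provided IV in place using the ESSIV cipher, then
 * hand the request to the inner skcipher with the derived IV.
 */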
static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	skcipher_request_set_tfm(subreq, tctx->u.skcipher);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      essiv_skcipher_done, req);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int essiv_skcipher_encrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, true);
}

static int essiv_skcipher_decrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, false);
}

static void essiv_aead_done(void *data, int err)
{
	struct aead_request *req = data;
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);

	if (err == -EINPROGRESS)
		goto out;

	kfree(rctx->assoc);

out:
	aead_request_complete(req, err);
}

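/*
 * As in the skcipher case, the IV is first encrypted in place. The extra
 * complication is that dm-crypt carries the IV inside the AAD, so the
 * derived IV must be spliced into the associated data before the request
 * is forwarded to the inner aead.
 */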
static int essiv_aead_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->aead_req;
	struct scatterlist *src = req->src;
	int err;

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	/*
	 * dm-crypt embeds the sector number and the IV in the AAD region, so
	 * we have to copy the converted IV into the right scatterlist before
	 * we pass it on.
	 */
	rctx->assoc = NULL;
	if (req->src == req->dst || !enc) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->assoclen - crypto_aead_ivsize(tfm),
					 crypto_aead_ivsize(tfm), 1);
	} else {
		u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
		int ivsize = crypto_aead_ivsize(tfm);
		int ssize = req->assoclen - ivsize;
		struct scatterlist *sg;
		int nents;

		if (ssize < 0)
			return -EINVAL;

		nents = sg_nents_for_len(req->src, ssize);
		if (nents < 0)
			return -EINVAL;

		memcpy(iv, req->iv, ivsize);
		sg_init_table(rctx->sg, 4);

		if (unlikely(nents > 1)) {
			/*
			 * This is a case that rarely occurs in practice, but
			 * for correctness, we have to deal with it nonetheless.
			 */
			rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
			if (!rctx->assoc)
				return -ENOMEM;

			scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
						 ssize, 0);
			sg_set_buf(rctx->sg, rctx->assoc, ssize);
		} else {
			sg_set_page(rctx->sg, sg_page(req->src), ssize,
				    req->src->offset);
		}

		sg_set_buf(rctx->sg + 1, iv, ivsize);
		sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
		if (sg != rctx->sg + 2)
			sg_chain(rctx->sg, 3, sg);

		src = rctx->sg;
	}

	aead_request_set_tfm(subreq, tctx->u.aead);
	aead_request_set_ad(subreq, req->assoclen);
	aead_request_set_callback(subreq, aead_request_flags(req),
				  essiv_aead_done, req);
	aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);

	err = enc ? crypto_aead_encrypt(subreq) :
		    crypto_aead_decrypt(subreq);

	if (rctx->assoc && err != -EINPROGRESS && err != -EBUSY)
		kfree(rctx->assoc);
	return err;
}

static int essiv_aead_encrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, true);
}

static int essiv_aead_decrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, false);
}

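/*
 * Common transform setup: allocate the ESSIV cipher and the hash by the
 * names recorded in the instance context. The caller owns the inner
 * skcipher/aead and is responsible for freeing it on failure.
 */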
static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
			  struct essiv_tfm_ctx *tctx)
{
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int err;

	essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
	if (IS_ERR(essiv_cipher))
		return PTR_ERR(essiv_cipher);

	hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_essiv_cipher;
	}

	tctx->essiv_cipher = essiv_cipher;
	tctx->hash = hash;

	return 0;

err_free_essiv_cipher:
	crypto_free_cipher(essiv_cipher);
	return err;
}

static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *skcipher;
	int err;

	skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
	if (IS_ERR(skcipher))
		return PTR_ERR(skcipher);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				         crypto_skcipher_reqsize(skcipher));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_skcipher(skcipher);
		return err;
	}

	tctx->u.skcipher = skcipher;
	return 0;
}

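/*
 * The AEAD request context is laid out as
 *
 *	struct essiv_aead_request_ctx | inner aead reqsize | IV
 *
 * with @ivoffset recording where the IV copy lives, so that
 * essiv_aead_crypt() can point a scatterlist entry at it.
 */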
static int essiv_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned int subreq_size;
	int err;

	BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
		     sizeof(struct essiv_aead_request_ctx));

	aead = crypto_spawn_aead(&ictx->u.aead_spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	subreq_size = sizeof_field(struct essiv_aead_request_ctx, aead_req) +
		      crypto_aead_reqsize(aead);

	tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
			 subreq_size;
	crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_aead(aead);
		return err;
	}

	tctx->u.aead = aead;
	return 0;
}

static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->u.skcipher);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	crypto_free_aead(tctx->u.aead);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
{
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	kfree(inst);
}

static void essiv_aead_free_instance(struct aead_instance *inst)
{
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);

	crypto_drop_aead(&ictx->u.aead_spawn);
	kfree(inst);
}

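/*
 * Extract the innermost cipher name from the cra_name of the block
 * encryption algorithm, e.g., "cbc(aes)" yields "aes", and
 * "authenc(hmac(sha256),cbc(aes))" also yields "aes".
 */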
static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
{
	const char *p, *q;
	int len;

	/* find the last opening parens */
	p = strrchr(cra_name, '(');
	if (!p++)
		return false;

	/* find the first closing parens in the tail of the string */
	q = strchr(p, ')');
	if (!q)
		return false;

	len = q - p;
	if (len >= CRYPTO_MAX_ALG_NAME)
		return false;

	memcpy(essiv_cipher_name, p, len);
	essiv_cipher_name[len] = '\0';
	return true;
}

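/*
 * Sanity check the combination: the hash digest must be a valid key size
 * for the ESSIV cipher, the IV size must match the cipher block size, and
 * the hash must not itself require a key.
 */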
static bool essiv_supported_algorithms(const char *essiv_cipher_name,
				       struct shash_alg *hash_alg,
				       int ivsize)
{
	struct crypto_alg *alg;
	bool ret = false;

	alg = crypto_alg_mod_lookup(essiv_cipher_name,
				    CRYPTO_ALG_TYPE_CIPHER,
				    CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return false;

	if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
	    hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
		goto out;

	if (ivsize != alg->cra_blocksize)
		goto out;

	if (crypto_shash_alg_needs_key(hash_alg))
		goto out;

	ret = true;

out:
	crypto_mod_put(alg);
	return ret;
}

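/*
 * Instantiate the template: grab the inner skcipher or aead, derive the
 * ESSIV cipher name from it, look up and validate the hash, and register
 * a skcipher or aead instance accordingly.
 */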
static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	const char *inner_cipher_name;
	const char *shash_name;
	struct skcipher_instance *skcipher_inst = NULL;
	struct aead_instance *aead_inst = NULL;
	struct crypto_instance *inst;
	struct crypto_alg *base, *block_base;
	struct essiv_instance_ctx *ictx;
	struct skcipher_alg *skcipher_alg = NULL;
	struct aead_alg *aead_alg = NULL;
	struct crypto_alg *_hash_alg;
	struct shash_alg *hash_alg;
	int ivsize;
	u32 type;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	inner_cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(inner_cipher_name))
		return PTR_ERR(inner_cipher_name);

	shash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(shash_name))
		return PTR_ERR(shash_name);

	type = algt->type & algt->mask;
	mask = crypto_algt_inherited_mask(algt);

	switch (type) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
					sizeof(*ictx), GFP_KERNEL);
		if (!skcipher_inst)
			return -ENOMEM;
		inst = skcipher_crypto_instance(skcipher_inst);
		base = &skcipher_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* Symmetric cipher, e.g., "cbc(aes)" */
		err = crypto_grab_skcipher(&ictx->u.skcipher_spawn, inst,
					   inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		skcipher_alg = crypto_spawn_skcipher_alg(&ictx->u.skcipher_spawn);
		block_base = &skcipher_alg->base;
		ivsize = crypto_skcipher_alg_ivsize(skcipher_alg);
		break;

	case CRYPTO_ALG_TYPE_AEAD:
		aead_inst = kzalloc(sizeof(*aead_inst) +
				    sizeof(*ictx), GFP_KERNEL);
		if (!aead_inst)
			return -ENOMEM;
		inst = aead_crypto_instance(aead_inst);
		base = &aead_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* AEAD cipher, e.g., "authenc(hmac(sha256),cbc(aes))" */
		err = crypto_grab_aead(&ictx->u.aead_spawn, inst,
				       inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
		block_base = &aead_alg->base;
		if (!strstarts(block_base->cra_name, "authenc(")) {
			pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
			err = -EINVAL;
			goto out_drop_skcipher;
		}
		ivsize = aead_alg->ivsize;
		break;

	default:
		return -EINVAL;
	}

	if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
		pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
		err = -EINVAL;
		goto out_drop_skcipher;
	}

	/* Synchronous hash, e.g., "sha256" */
	_hash_alg = crypto_alg_mod_lookup(shash_name,
					  CRYPTO_ALG_TYPE_SHASH,
					  CRYPTO_ALG_TYPE_MASK | mask);
	if (IS_ERR(_hash_alg)) {
		err = PTR_ERR(_hash_alg);
		goto out_drop_skcipher;
	}
	hash_alg = __crypto_shash_alg(_hash_alg);

	/* Check the set of algorithms */
	if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
					ivsize)) {
		pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
			block_base->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto out_free_hash;
	}

	/* record the driver name so we can instantiate this exact algo later */
	strscpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name,
		CRYPTO_MAX_ALG_NAME);

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_name,
		     hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;
	if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;

	/*
	 * hash_alg wasn't gotten via crypto_grab*(), so we need to inherit its
	 * flags manually.
	 */
	base->cra_flags        |= (hash_alg->base.cra_flags &
				   CRYPTO_ALG_INHERITED_FLAGS);
	base->cra_blocksize	= block_base->cra_blocksize;
	base->cra_ctxsize	= sizeof(struct essiv_tfm_ctx);
	base->cra_alignmask	= block_base->cra_alignmask;
	base->cra_priority	= block_base->cra_priority;

	if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
		skcipher_inst->alg.setkey	= essiv_skcipher_setkey;
		skcipher_inst->alg.encrypt	= essiv_skcipher_encrypt;
		skcipher_inst->alg.decrypt	= essiv_skcipher_decrypt;
		skcipher_inst->alg.init		= essiv_skcipher_init_tfm;
		skcipher_inst->alg.exit		= essiv_skcipher_exit_tfm;

		skcipher_inst->alg.min_keysize	= crypto_skcipher_alg_min_keysize(skcipher_alg);
		skcipher_inst->alg.max_keysize	= crypto_skcipher_alg_max_keysize(skcipher_alg);
		skcipher_inst->alg.ivsize	= ivsize;
		skcipher_inst->alg.chunksize	= crypto_skcipher_alg_chunksize(skcipher_alg);
		skcipher_inst->alg.walksize	= crypto_skcipher_alg_walksize(skcipher_alg);

		skcipher_inst->free		= essiv_skcipher_free_instance;

		err = skcipher_register_instance(tmpl, skcipher_inst);
	} else {
		aead_inst->alg.setkey		= essiv_aead_setkey;
		aead_inst->alg.setauthsize	= essiv_aead_setauthsize;
		aead_inst->alg.encrypt		= essiv_aead_encrypt;
		aead_inst->alg.decrypt		= essiv_aead_decrypt;
		aead_inst->alg.init		= essiv_aead_init_tfm;
		aead_inst->alg.exit		= essiv_aead_exit_tfm;

		aead_inst->alg.ivsize		= ivsize;
		aead_inst->alg.maxauthsize	= crypto_aead_alg_maxauthsize(aead_alg);
		aead_inst->alg.chunksize	= crypto_aead_alg_chunksize(aead_alg);

		aead_inst->free			= essiv_aead_free_instance;

		err = aead_register_instance(tmpl, aead_inst);
	}

	if (err)
		goto out_free_hash;

	crypto_mod_put(_hash_alg);
	return 0;

out_free_hash:
	crypto_mod_put(_hash_alg);
out_drop_skcipher:
	if (type == CRYPTO_ALG_TYPE_SKCIPHER)
		crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	else
		crypto_drop_aead(&ictx->u.aead_spawn);
out_free_inst:
	kfree(skcipher_inst);
	kfree(aead_inst);
	return err;
}

/* essiv(cipher_name, shash_name) */
static struct crypto_template essiv_tmpl = {
	.name	= "essiv",
	.create	= essiv_create,
	.module	= THIS_MODULE,
};

static int __init essiv_module_init(void)
{
	return crypto_register_template(&essiv_tmpl);
}

static void __exit essiv_module_exit(void)
{
	crypto_unregister_template(&essiv_tmpl);
}

subsys_initcall(essiv_module_init);
module_exit(essiv_module_exit);

MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("essiv");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);