xref: /openbmc/linux/crypto/cts.c (revision 255e48eb)
176cb9521SKevin Coffman /*
276cb9521SKevin Coffman  * CTS: Cipher Text Stealing mode
376cb9521SKevin Coffman  *
476cb9521SKevin Coffman  * COPYRIGHT (c) 2008
576cb9521SKevin Coffman  * The Regents of the University of Michigan
676cb9521SKevin Coffman  * ALL RIGHTS RESERVED
776cb9521SKevin Coffman  *
876cb9521SKevin Coffman  * Permission is granted to use, copy, create derivative works
976cb9521SKevin Coffman  * and redistribute this software and such derivative works
1076cb9521SKevin Coffman  * for any purpose, so long as the name of The University of
1176cb9521SKevin Coffman  * Michigan is not used in any advertising or publicity
1276cb9521SKevin Coffman  * pertaining to the use of distribution of this software
1376cb9521SKevin Coffman  * without specific, written prior authorization.  If the
1476cb9521SKevin Coffman  * above copyright notice or any other identification of the
1576cb9521SKevin Coffman  * University of Michigan is included in any copy of any
1676cb9521SKevin Coffman  * portion of this software, then the disclaimer below must
1776cb9521SKevin Coffman  * also be included.
1876cb9521SKevin Coffman  *
1976cb9521SKevin Coffman  * THIS SOFTWARE IS PROVIDED AS IS, WITHOUT REPRESENTATION
2076cb9521SKevin Coffman  * FROM THE UNIVERSITY OF MICHIGAN AS TO ITS FITNESS FOR ANY
2176cb9521SKevin Coffman  * PURPOSE, AND WITHOUT WARRANTY BY THE UNIVERSITY OF
2276cb9521SKevin Coffman  * MICHIGAN OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING
2376cb9521SKevin Coffman  * WITHOUT LIMITATION THE IMPLIED WARRANTIES OF
2476cb9521SKevin Coffman  * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE
2576cb9521SKevin Coffman  * REGENTS OF THE UNIVERSITY OF MICHIGAN SHALL NOT BE LIABLE
2676cb9521SKevin Coffman  * FOR ANY DAMAGES, INCLUDING SPECIAL, INDIRECT, INCIDENTAL, OR
2776cb9521SKevin Coffman  * CONSEQUENTIAL DAMAGES, WITH RESPECT TO ANY CLAIM ARISING
2876cb9521SKevin Coffman  * OUT OF OR IN CONNECTION WITH THE USE OF THE SOFTWARE, EVEN
2976cb9521SKevin Coffman  * IF IT HAS BEEN OR IS HEREAFTER ADVISED OF THE POSSIBILITY OF
3076cb9521SKevin Coffman  * SUCH DAMAGES.
3176cb9521SKevin Coffman  */
3276cb9521SKevin Coffman 
3376cb9521SKevin Coffman /* Derived from various:
3476cb9521SKevin Coffman  *	Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
3576cb9521SKevin Coffman  */
3676cb9521SKevin Coffman 
3776cb9521SKevin Coffman /*
3876cb9521SKevin Coffman  * This is the Cipher Text Stealing mode as described by
3976cb9521SKevin Coffman  * Section 8 of rfc2040 and referenced by rfc3962.
4076cb9521SKevin Coffman  * rfc3962 includes errata information in its Appendix A.
4176cb9521SKevin Coffman  */
4276cb9521SKevin Coffman 
436650c4deSSalvatore Mesoraca #include <crypto/algapi.h>
440605c41cSHerbert Xu #include <crypto/internal/skcipher.h>
4576cb9521SKevin Coffman #include <linux/err.h>
4676cb9521SKevin Coffman #include <linux/init.h>
4776cb9521SKevin Coffman #include <linux/kernel.h>
4876cb9521SKevin Coffman #include <linux/log2.h>
4976cb9521SKevin Coffman #include <linux/module.h>
5076cb9521SKevin Coffman #include <linux/scatterlist.h>
5176cb9521SKevin Coffman #include <crypto/scatterwalk.h>
5276cb9521SKevin Coffman #include <linux/slab.h>
53d8c34b94SGideon Israel Dsouza #include <linux/compiler.h>
5476cb9521SKevin Coffman 
/* Per-transform context: just the underlying cbc(cipher) handle. */
struct crypto_cts_ctx {
	struct crypto_skcipher *child;
};
5876cb9521SKevin Coffman 
/*
 * Per-request context.  It is followed in the request context memory by
 * the child transform's request context and then an alignmask-aligned,
 * block-sized scratch buffer — see crypto_cts_reqctx_space() and the
 * reqsize computation in crypto_cts_init_tfm().
 */
struct crypto_cts_reqctx {
	struct scatterlist sg[2];	/* output chain for scatterwalk_ffwd() */
	unsigned offset;		/* start of the final partial block */
	struct skcipher_request subreq;	/* keep last: child req ctx follows */
};
640605c41cSHerbert Xu 
/*
 * Return the block-sized scratch area that lives after the child
 * transform's request context, aligned to the transform's alignment
 * mask.  Space for it was reserved in crypto_cts_init_tfm().
 */
static inline u8 *crypto_cts_reqctx_space(struct skcipher_request *req)
{
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;

	/* Skip our reqctx plus the child's, then round up to alignment. */
	return PTR_ALIGN((u8 *)(rctx + 1) + crypto_skcipher_reqsize(child),
			 crypto_skcipher_alignmask(tfm) + 1);
}
750605c41cSHerbert Xu 
crypto_cts_setkey(struct crypto_skcipher * parent,const u8 * key,unsigned int keylen)760605c41cSHerbert Xu static int crypto_cts_setkey(struct crypto_skcipher *parent, const u8 *key,
7776cb9521SKevin Coffman 			     unsigned int keylen)
7876cb9521SKevin Coffman {
790605c41cSHerbert Xu 	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(parent);
800605c41cSHerbert Xu 	struct crypto_skcipher *child = ctx->child;
8176cb9521SKevin Coffman 
820605c41cSHerbert Xu 	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
830605c41cSHerbert Xu 	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
8476cb9521SKevin Coffman 					 CRYPTO_TFM_REQ_MASK);
85af5034e8SEric Biggers 	return crypto_skcipher_setkey(child, key, keylen);
8676cb9521SKevin Coffman }
8776cb9521SKevin Coffman 
cts_cbc_crypt_done(void * data,int err)88*255e48ebSHerbert Xu static void cts_cbc_crypt_done(void *data, int err)
8976cb9521SKevin Coffman {
90*255e48ebSHerbert Xu 	struct skcipher_request *req = data;
9176cb9521SKevin Coffman 
920605c41cSHerbert Xu 	if (err == -EINPROGRESS)
930605c41cSHerbert Xu 		return;
9476cb9521SKevin Coffman 
950605c41cSHerbert Xu 	skcipher_request_complete(req, err);
9676cb9521SKevin Coffman }
9776cb9521SKevin Coffman 
/*
 * Perform the ciphertext-stealing step of an encryption (RFC 2040
 * section 8, referenced by RFC 3962): encrypt the zero-padded final
 * partial plaintext block and swap it with the preceding ciphertext
 * block.  Assumes the first rctx->offset bytes have already been
 * CBC-encrypted into req->dst and req->iv holds the resulting chaining
 * value.
 */
static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	u8 d[MAX_CIPHER_BLOCKSIZE * 2] __aligned(__alignof__(u32));
	struct scatterlist *sg;
	unsigned int offset;
	int lastn;

	offset = rctx->offset;
	lastn = req->cryptlen - offset;	/* 1..bsize bytes in the last block */

	/* sg addresses the last full ciphertext block already in dst. */
	sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);
	scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);

	/* Zero-pad the final partial plaintext block to a full block. */
	memset(d, 0, bsize);
	scatterwalk_map_and_copy(d, req->src, offset, lastn, 0);

	/* Swap: padded Pn goes where Cn-1 was; Cn-1's head becomes Cn. */
	scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
	memzero_explicit(d, sizeof(d));	/* d held plaintext; wipe it */

	skcipher_request_set_callback(subreq, req->base.flags &
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      cts_cbc_crypt_done, req);
	/* Encrypt the swapped-in block in place to produce Cn-1. */
	skcipher_request_set_crypt(subreq, sg, sg, bsize, req->iv);
	return crypto_skcipher_encrypt(subreq);
}
12776cb9521SKevin Coffman 
crypto_cts_encrypt_done(void * data,int err)128*255e48ebSHerbert Xu static void crypto_cts_encrypt_done(void *data, int err)
12976cb9521SKevin Coffman {
130*255e48ebSHerbert Xu 	struct skcipher_request *req = data;
13176cb9521SKevin Coffman 
13276cb9521SKevin Coffman 	if (err)
1330605c41cSHerbert Xu 		goto out;
1340605c41cSHerbert Xu 
1350605c41cSHerbert Xu 	err = cts_cbc_encrypt(req);
1364e5b0ad5SGilad Ben-Yossef 	if (err == -EINPROGRESS || err == -EBUSY)
1370605c41cSHerbert Xu 		return;
1380605c41cSHerbert Xu 
1390605c41cSHerbert Xu out:
1400605c41cSHerbert Xu 	skcipher_request_complete(req, err);
1410605c41cSHerbert Xu }
1420605c41cSHerbert Xu 
/*
 * Encrypt: CBC-encrypt everything up to and including the last full
 * block, then apply ciphertext stealing to the final (possibly
 * partial) block via cts_cbc_encrypt().
 */
static int crypto_cts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	unsigned int nbytes = req->cryptlen;
	unsigned int offset;

	skcipher_request_set_tfm(subreq, ctx->child);

	/* CTS needs at least one full block of input. */
	if (nbytes < bsize)
		return -EINVAL;

	/* Exactly one block degenerates to plain CBC: pass straight down. */
	if (nbytes == bsize) {
		skcipher_request_set_callback(subreq, req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes,
					   req->iv);
		return crypto_skcipher_encrypt(subreq);
	}

	/* Bytes covered by the bulk CBC pass; 1..bsize bytes remain. */
	offset = rounddown(nbytes - 1, bsize);
	rctx->offset = offset;

	skcipher_request_set_callback(subreq, req->base.flags,
				      crypto_cts_encrypt_done, req);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   offset, req->iv);

	/* Synchronous completion: run the stealing step inline. */
	return crypto_skcipher_encrypt(subreq) ?:
	       cts_cbc_encrypt(req);
}
1780605c41cSHerbert Xu 
/*
 * Perform the ciphertext-stealing step of a decryption, following the
 * numbered steps of RFC 2040 section 8.  Assumes the first
 * rctx->offset bytes have been CBC-decrypted into req->dst and that
 * crypto_cts_decrypt() stashed the ciphertext block (or IV) preceding
 * Cn-1 in the reqctx scratch space.
 */
static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	u8 d[MAX_CIPHER_BLOCKSIZE * 2] __aligned(__alignof__(u32));
	struct scatterlist *sg;
	unsigned int offset;
	u8 *space;
	int lastn;

	offset = rctx->offset;
	lastn = req->cryptlen - offset;	/* 1..bsize bytes in the last block */

	/* sg addresses the last full block of the decrypted output. */
	sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);

	/* 1. Decrypt Cn-1 (s) to create Dn */
	/* The CBC pass xored in the preceding ciphertext block; undo it. */
	scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);
	space = crypto_cts_reqctx_space(req);
	crypto_xor(d + bsize, space, bsize);
	/* 2. Pad Cn with zeros at the end to create C of length BB */
	memset(d, 0, bsize);
	scatterwalk_map_and_copy(d, req->src, offset, lastn, 0);
	/* 3. Exclusive-or Dn with C to create Xn */
	/* 4. Select the first Ln bytes of Xn to create Pn */
	crypto_xor(d + bsize, d, lastn);

	/* 5. Append the tail (BB - Ln) bytes of Xn to Cn to create En */
	memcpy(d + lastn, d + bsize + lastn, bsize - lastn);
	/* 6. Decrypt En to create Pn-1 */

	scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
	memzero_explicit(d, sizeof(d));	/* d held plaintext; wipe it */

	skcipher_request_set_callback(subreq, req->base.flags &
					      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      cts_cbc_crypt_done, req);

	/* "space" doubles as the IV for the final block's decryption. */
	skcipher_request_set_crypt(subreq, sg, sg, bsize, space);
	return crypto_skcipher_decrypt(subreq);
}
22176cb9521SKevin Coffman 
crypto_cts_decrypt_done(void * data,int err)222*255e48ebSHerbert Xu static void crypto_cts_decrypt_done(void *data, int err)
22376cb9521SKevin Coffman {
224*255e48ebSHerbert Xu 	struct skcipher_request *req = data;
22576cb9521SKevin Coffman 
2260605c41cSHerbert Xu 	if (err)
2270605c41cSHerbert Xu 		goto out;
22876cb9521SKevin Coffman 
2290605c41cSHerbert Xu 	err = cts_cbc_decrypt(req);
2304e5b0ad5SGilad Ben-Yossef 	if (err == -EINPROGRESS || err == -EBUSY)
2310605c41cSHerbert Xu 		return;
2320605c41cSHerbert Xu 
2330605c41cSHerbert Xu out:
2340605c41cSHerbert Xu 	skcipher_request_complete(req, err);
23576cb9521SKevin Coffman }
23676cb9521SKevin Coffman 
/*
 * Decrypt: CBC-decrypt everything up to and including the last full
 * ciphertext block, then undo the ciphertext stealing on the final
 * two blocks via cts_cbc_decrypt().
 */
static int crypto_cts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = &rctx->subreq;
	int bsize = crypto_skcipher_blocksize(tfm);
	unsigned int nbytes = req->cryptlen;
	unsigned int offset;
	u8 *space;

	skcipher_request_set_tfm(subreq, ctx->child);

	/* CTS needs at least one full block of input. */
	if (nbytes < bsize)
		return -EINVAL;

	/* Exactly one block degenerates to plain CBC: pass straight down. */
	if (nbytes == bsize) {
		skcipher_request_set_callback(subreq, req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(subreq, req->src, req->dst, nbytes,
					   req->iv);
		return crypto_skcipher_decrypt(subreq);
	}

	skcipher_request_set_callback(subreq, req->base.flags,
				      crypto_cts_decrypt_done, req);

	space = crypto_cts_reqctx_space(req);

	/* Bytes covered by the bulk CBC pass; 1..bsize bytes remain. */
	offset = rounddown(nbytes - 1, bsize);
	rctx->offset = offset;

	/* Stash the block preceding Cn-1 (the IV when Cn-1 is the first
	 * block): cts_cbc_decrypt() needs it to undo the CBC chaining
	 * xor and as the IV for the final block's decryption. */
	if (offset <= bsize)
		memcpy(space, req->iv, bsize);
	else
		scatterwalk_map_and_copy(space, req->src, offset - 2 * bsize,
					 bsize, 0);

	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   offset, req->iv);

	/* Synchronous completion: run the stealing step inline. */
	return crypto_skcipher_decrypt(subreq) ?:
	       cts_cbc_decrypt(req);
}
2820605c41cSHerbert Xu 
/*
 * Instantiate the child cbc(cipher) transform and size our request
 * context: crypto_cts_reqctx, the child's request context, alignment
 * slack, and one cipher block of scratch space handed out by
 * crypto_cts_reqctx_space().
 */
static int crypto_cts_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned reqsize;
	unsigned bsize;
	unsigned align;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	bsize = crypto_skcipher_blocksize(cipher);
	/* reqctx + child reqctx, then worst-case padding so the trailing
	 * bsize scratch buffer can be aligned to the transform's mask. */
	reqsize = ALIGN(sizeof(struct crypto_cts_reqctx) +
			crypto_skcipher_reqsize(cipher),
			crypto_tfm_ctx_alignment()) +
		  (align & ~(crypto_tfm_ctx_alignment() - 1)) + bsize;

	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}
31076cb9521SKevin Coffman 
/* Release the child transform acquired in crypto_cts_init_tfm(). */
static void crypto_cts_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_cts_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}
31776cb9521SKevin Coffman 
/* Drop the spawned child algorithm and free the instance itself. */
static void crypto_cts_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}
3230605c41cSHerbert Xu 
/*
 * Template instantiation for cts(cbc(cipher)).  Grabs the inner
 * skcipher, verifies it really is a CBC mode with a one-block IV, and
 * registers the wrapping instance.
 */
static int crypto_cts_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_skcipher_spawn *spawn;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	err = -EINVAL;
	/* CTS carries the chaining value in the IV, one block wide. */
	if (crypto_skcipher_alg_ivsize(alg) != alg->base.cra_blocksize)
		goto err_free_inst;

	/* Only a cbc(*) inner algorithm makes sense underneath cts. */
	if (strncmp(alg->base.cra_name, "cbc(", 4))
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "cts",
				  &alg->base);
	if (err)
		goto err_free_inst;

	/* Inherit the inner algorithm's geometry and key limits. */
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = alg->base.cra_blocksize;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = alg->base.cra_blocksize;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_cts_ctx);

	inst->alg.init = crypto_cts_init_tfm;
	inst->alg.exit = crypto_cts_exit_tfm;

	inst->alg.setkey = crypto_cts_setkey;
	inst->alg.encrypt = crypto_cts_encrypt;
	inst->alg.decrypt = crypto_cts_decrypt;

	inst->free = crypto_cts_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_cts_free(inst);
	}
	return err;
}
38876cb9521SKevin Coffman 
/* Template registered as "cts"; instantiated as cts(cbc(cipher)). */
static struct crypto_template crypto_cts_tmpl = {
	.name = "cts",
	.create = crypto_cts_create,
	.module = THIS_MODULE,
};
39476cb9521SKevin Coffman 
/* Register the "cts" template with the crypto API on module load. */
static int __init crypto_cts_module_init(void)
{
	return crypto_register_template(&crypto_cts_tmpl);
}
39976cb9521SKevin Coffman 
/* Unregister the "cts" template on module unload. */
static void __exit crypto_cts_module_exit(void)
{
	crypto_unregister_template(&crypto_cts_tmpl);
}
40476cb9521SKevin Coffman 
/* subsys_initcall: templates must exist before dependent algorithms. */
subsys_initcall(crypto_cts_module_init);
module_exit(crypto_cts_module_exit);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_DESCRIPTION("CTS-CBC CipherText Stealing for CBC");
MODULE_ALIAS_CRYPTO("cts");
411