// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * XCTR: XOR Counter mode - Adapted from ctr.c
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 * Copyright 2021 Google LLC
 */

/*
 * XCTR mode is a blockcipher mode of operation used to implement HCTR2. XCTR
 * is closely related to the CTR mode of operation; the main difference is that
 * CTR generates the keystream using E(CTR + IV) whereas XCTR generates the
 * keystream using E(CTR ^ IV). This allows implementations to avoid dealing
 * with multi-limb integers (as is required in CTR mode). XCTR is also
 * specified using little-endian arithmetic, which makes it slightly faster on
 * LE machines.
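 *
 * Concretely, for block i (counting from 0), this implementation computes the
 * keystream block as E(IV ^ (i + 1)), with the counter encoded as a 32-bit
 * little-endian integer XORed into the first four bytes of the IV.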
 *
 * See the HCTR2 paper for more details:
 *	Length-preserving encryption with HCTR2
 *	(https://eprint.iacr.org/2021/1441.pdf)
 */

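/*
 * Example usage (a minimal sketch, not exercised anywhere in this file): once
 * this template is registered, an "xctr(aes)" transform can be allocated
 * through the standard skcipher API, where key/keylen are caller-supplied:
 *
 *	struct crypto_skcipher *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("xctr(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 */
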
#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

/* For now this implementation is limited to 16-byte blocks for simplicity */
#define XCTR_BLOCKSIZE 16

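/*
 * Handle the final, possibly partial, block of the walk: generate one
 * keystream block on the stack and XOR only 'nbytes' of it with the source.
 */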
static void crypto_xctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm, u32 byte_ctr)
{
	u8 keystream[XCTR_BLOCKSIZE];
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;
	__le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);

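	/*
	 * XOR the block counter into the IV, encrypt to get the keystream,
	 * then XOR the counter back out so walk->iv is left unchanged.
	 */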
	crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
	crypto_cipher_encrypt_one(tfm, keystream, walk->iv);
	crypto_xor_cpy(dst, keystream, src, nbytes);
	crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
}

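/*
 * Process full blocks when the source and destination buffers are distinct.
 * Returns the number of leftover bytes (less than one block).
 */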
static int crypto_xctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm, u32 byte_ctr)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;
	__le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);

	do {
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
		fn(crypto_cipher_tfm(tfm), dst, walk->iv);
		crypto_xor(dst, src, XCTR_BLOCKSIZE);
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));

		le32_add_cpu(&ctr32, 1);

		src += XCTR_BLOCKSIZE;
		dst += XCTR_BLOCKSIZE;
	} while ((nbytes -= XCTR_BLOCKSIZE) >= XCTR_BLOCKSIZE);

	return nbytes;
}

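/*
 * Process full blocks in place.  The keystream block is generated into a
 * temporary buffer aligned for the underlying cipher, then XORed into the
 * data.  Returns the number of leftover bytes (less than one block).
 */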
static int crypto_xctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm, u32 byte_ctr)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *data = walk->src.virt.addr;
	u8 tmp[XCTR_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	__le32 ctr32 = cpu_to_le32(byte_ctr / XCTR_BLOCKSIZE + 1);

	do {
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));
		fn(crypto_cipher_tfm(tfm), keystream, walk->iv);
		crypto_xor(data, keystream, XCTR_BLOCKSIZE);
		crypto_xor(walk->iv, (u8 *)&ctr32, sizeof(ctr32));

		le32_add_cpu(&ctr32, 1);

		data += XCTR_BLOCKSIZE;
	} while ((nbytes -= XCTR_BLOCKSIZE) >= XCTR_BLOCKSIZE);

	return nbytes;
}

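/*
 * Walk the request and dispatch full blocks to the in-place or out-of-place
 * helper.  byte_ctr accumulates the number of bytes processed so far, so that
 * the per-block counter stays correct across walk chunks; any final partial
 * block is handled by crypto_xctr_crypt_final().
 */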
static int crypto_xctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;
	u32 byte_ctr = 0;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= XCTR_BLOCKSIZE) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_xctr_crypt_inplace(&walk, cipher,
							   byte_ctr);
		else
			nbytes = crypto_xctr_crypt_segment(&walk, cipher,
							   byte_ctr);

		byte_ctr += walk.nbytes - nbytes;
		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_xctr_crypt_final(&walk, cipher, byte_ctr);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

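/*
 * Instantiate "xctr(cipher)" using the simple skcipher template helpers,
 * wrapping a single underlying block cipher.
 */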
static int crypto_xctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	/* Block size must be 16 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize != XCTR_BLOCKSIZE)
		goto out_free_inst;

	/* XCTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_xctr_crypt;
	inst->alg.decrypt = crypto_xctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

static struct crypto_template crypto_xctr_tmpl = {
	.name = "xctr",
	.create = crypto_xctr_create,
	.module = THIS_MODULE,
};

static int __init crypto_xctr_module_init(void)
{
	return crypto_register_template(&crypto_xctr_tmpl);
}

static void __exit crypto_xctr_module_exit(void)
{
	crypto_unregister_template(&crypto_xctr_tmpl);
}

subsys_initcall(crypto_xctr_module_init);
module_exit(crypto_xctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XCTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("xctr");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);