// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
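
/*
 * PCBC chains each block with both the previous plaintext block and the
 * previous ciphertext block:
 *
 *	C[i] = E_K(P[i] ^ P[i-1] ^ C[i-1])
 *	P[i] = D_K(C[i]) ^ P[i-1] ^ C[i-1]
 *
 * with the IV standing in for P[i-1] ^ C[i-1] on the first block.  The
 * helpers below keep that running chaining value in walk->iv.
 */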

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>

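/*
 * Encrypt one contiguous chunk of the walk when the source and destination
 * buffers are distinct.  Returns the number of bytes left over (less than
 * one block), which the caller passes on to skcipher_walk_done().
 */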
static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 * const iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, dst, iv);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

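/*
 * Encrypt one chunk of the walk in place (source == destination).  The
 * plaintext block is saved in tmpbuf before it is overwritten, because it
 * is still needed to compute the next chaining value.
 */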
static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 * const iv = walk->iv;
	u8 tmpbuf[MAX_CIPHER_BLOCKSIZE];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, src, iv);
		crypto_xor_cpy(iv, tmpbuf, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

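/*
 * skcipher .encrypt handler: walk the request's scatterlists and hand each
 * mapped chunk to the in-place or out-of-place helper above.
 */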
static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
							     cipher);
		else
			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
							     cipher);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

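/*
 * Decrypt one contiguous chunk of the walk when the source and destination
 * buffers are distinct: recover P[i] = D_K(C[i]) ^ chaining value, then the
 * chaining value becomes P[i] ^ C[i].
 */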
static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 * const iv = walk->iv;

	do {
		crypto_cipher_decrypt_one(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

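/*
 * Decrypt one chunk of the walk in place (source == destination).  The
 * ciphertext block is saved in tmpbuf before it is overwritten, because it
 * is still needed to compute the next chaining value.
 */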
static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 * const iv = walk->iv;
	u8 tmpbuf[MAX_CIPHER_BLOCKSIZE] __aligned(__alignof__(u32));

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_cipher_decrypt_one(tfm, src, src);
		crypto_xor(src, iv, bsize);
		crypto_xor_cpy(iv, src, tmpbuf, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

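/*
 * skcipher .decrypt handler: walk the request's scatterlists and hand each
 * mapped chunk to the in-place or out-of-place helper above.
 */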
static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
							     cipher);
		else
			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
							     cipher);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

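/*
 * Template ->create callback: skcipher_alloc_instance_simple() builds a
 * "pcbc(cipher)" instance around the underlying single-block cipher, so all
 * that is left to fill in here are the encrypt/decrypt handlers.
 */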
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);
	crypto_mod_put(alg);
	return err;
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};

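/*
 * Register the "pcbc" template at subsystem init time so that instances
 * such as "pcbc(aes)" can be instantiated on demand.
 */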
static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

subsys_initcall(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher mode of operation");
MODULE_ALIAS_CRYPTO("pcbc");