/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator generates an IV based on a sequence number by xoring it
 * with a salt.  This algorithm is mainly useful for CTR and similar modes.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
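
/*
 * Usage sketch (illustrative only; the inner algorithm name below is an
 * assumption, not something this file defines): instances of this
 * template are requested by name, wrapping an inner algorithm, e.g.:
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("seqiv(rfc4106(gcm(aes)))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 */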

#include <crypto/internal/geniv.h>
#include <crypto/internal/skcipher.h>
#include <crypto/rng.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/string.h>

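/*
 * Per-instance context for the legacy ablkcipher path.  @salt is a
 * flexible array; seqiv_ablkcipher_create() grows cra_ctxsize by the
 * IV size so the salt spans exactly one byte per IV byte.
 */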
struct seqiv_ctx {
	spinlock_t lock;
	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
};

static void seqiv_free(struct crypto_instance *inst);

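/*
 * Completion helper for seqiv_givencrypt()'s misaligned-IV slow path:
 * the IV was generated into a kmalloc()ed bounce buffer, so copy it
 * back into the caller's request on success and free the buffer.
 * -EINPROGRESS only signals that a backlogged request has started, so
 * the buffer must stay alive until the final completion.
 */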
static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err)
{
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	struct crypto_ablkcipher *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = skcipher_givcrypt_reqtfm(req);
	memcpy(req->creq.info, subreq->info, crypto_ablkcipher_ivsize(geniv));

out:
	kfree(subreq->info);
}

static void seqiv_complete(struct crypto_async_request *base, int err)
{
	struct skcipher_givcrypt_request *req = base->data;

	seqiv_complete2(req, err);
	skcipher_givcrypt_complete(req, err);
}

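/*
 * Same bounce-buffer copy-back, for the AEAD encryption slow path; here
 * the buffer is cleared with kzfree() before being released.
 */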
static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = crypto_aead_reqtfm(req);
	memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kzfree(subreq->iv);
}

static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
					int err)
{
	struct aead_request *req = base->data;

	seqiv_aead_encrypt_complete2(req, err);
	aead_request_complete(req, err);
}

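/*
 * Build the IV: write the 64-bit sequence number big-endian into the
 * low-order end of @info, zero any bytes above it, then XOR the whole
 * IV with the per-instance salt.  Worked example with ivsize = 16 and
 * seq = 1:
 *
 *	info = 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 01
 *	iv   = info ^ salt
 *
 * Uniqueness of the sequence number thus carries over to the IV, which
 * is exactly what CTR-style modes require.
 */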
static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
			unsigned int ivsize)
{
	unsigned int len = ivsize;

	if (ivsize > sizeof(u64)) {
		memset(info, 0, ivsize - sizeof(u64));
		len = sizeof(u64);
	}
	seq = cpu_to_be64(seq);
	memcpy(info + ivsize - len, &seq, len);
	crypto_xor(info, ctx->salt, ivsize);
}

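/*
 * givencrypt for the legacy ablkcipher interface: derive the IV from
 * req->seq, publish it through req->giv, and run the underlying
 * encryption.  If the caller's IV buffer violates the cipher's
 * alignmask, the IV is built in a kmalloc()ed bounce buffer and copied
 * back on completion (see seqiv_complete()).
 */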
static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize;
	int err;

	ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));

	compl = req->creq.base.complete;
	data = req->creq.base.data;
	info = req->creq.info;

	ivsize = crypto_ablkcipher_ivsize(geniv);

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_ablkcipher_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, req->creq.base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
								  GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		compl = seqiv_complete;
		data = req;
	}

	ablkcipher_request_set_callback(subreq, req->creq.base.flags, compl,
					data);
	ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
				     req->creq.nbytes, info);

	seqiv_geniv(ctx, info, req->seq, ivsize);
	memcpy(req->giv, info, ivsize);

	err = crypto_ablkcipher_encrypt(subreq);
	if (unlikely(info != req->creq.info))
		seqiv_complete2(req, err);
	return err;
}

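/*
 * AEAD encryption.  The caller supplies its sequence number in the
 * 8-byte req->iv; the transmitted IV is that value XORed with the salt.
 * The first ivsize bytes of the plaintext area carry the IV, so dst
 * ends up laid out as
 *
 *	assoclen bytes of AD | 8-byte IV | ciphertext
 *
 * and the inner transform authenticates the IV by treating it as
 * trailing associated data (assoclen + ivsize below).  Out-of-place
 * requests first copy AD and plaintext from src to dst through the
 * null cipher so encryption can then run in place on dst.
 */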
static int seqiv_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize = 8;
	int err;

	if (req->cryptlen < ivsize)
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->child);

	compl = req->base.complete;
	data = req->base.data;
	info = req->iv;

	if (req->src != req->dst) {
		struct blkcipher_desc desc = {
			.tfm = ctx->null,
		};

		err = crypto_blkcipher_encrypt(&desc, req->dst, req->src,
					       req->assoclen + req->cryptlen);
		if (err)
			return err;
	}

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_aead_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, req->base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL:
								  GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		memcpy(info, req->iv, ivsize);
		compl = seqiv_aead_encrypt_complete;
		data = req;
	}

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->dst, req->dst,
			       req->cryptlen - ivsize, info);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	crypto_xor(info, ctx->salt, ivsize);
	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(info != req->iv))
		seqiv_aead_encrypt_complete2(req, err);
	return err;
}

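/*
 * AEAD decryption: pull the 8-byte IV that sits between the AD and the
 * ciphertext into req->iv, count it as associated data for the inner
 * transform, and decrypt the remaining cryptlen - ivsize bytes.
 */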
static int seqiv_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;

	if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->child);

	compl = req->base.complete;
	data = req->base.data;

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);

	return crypto_aead_decrypt(subreq);
}

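/*
 * Legacy ablkcipher init: fill @salt with one byte of default-RNG
 * output per IV byte.  givencrypt is only wired up when the RNG could
 * be taken, and an RNG read failure fails initialization.
 */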
static int seqiv_init(struct crypto_tfm *tfm)
{
	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	int err;

	spin_lock_init(&ctx->lock);

	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);

	err = 0;
	if (!crypto_get_default_rng()) {
		crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt;
		err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
					   crypto_ablkcipher_ivsize(geniv));
		crypto_put_default_rng();
	}

	return err ?: skcipher_geniv_init(tfm);
}

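/*
 * Build a "seqiv" instance around an ablkcipher: the IV must be large
 * enough to hold the 64-bit sequence number, and cra_ctxsize grows by
 * ivsize bytes so struct seqiv_ctx's flexible salt array fits.
 */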
static int seqiv_ablkcipher_create(struct crypto_template *tmpl,
				   struct rtattr **tb)
{
	struct crypto_instance *inst;
	int err;

	inst = skcipher_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	err = -EINVAL;
	if (inst->alg.cra_ablkcipher.ivsize < sizeof(u64))
		goto free_inst;

	inst->alg.cra_init = seqiv_init;
	inst->alg.cra_exit = skcipher_geniv_exit;

	inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);

	inst->alg.cra_alignmask |= __alignof__(u32) - 1;

	err = crypto_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	skcipher_geniv_free(inst);
	goto out;
}

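/*
 * Build a "seqiv" instance around an AEAD.  Only an exactly 64-bit IV
 * is accepted here, and salt handling is delegated to the shared
 * aead_init_geniv()/aead_exit_geniv() helpers.
 */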
static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	int err;

	inst = aead_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;

	spawn = aead_instance_ctx(inst);
	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;
	if (inst->alg.ivsize != sizeof(u64))
		goto free_inst;

	inst->alg.encrypt = seqiv_aead_encrypt;
	inst->alg.decrypt = seqiv_aead_decrypt;

	inst->alg.init = aead_init_geniv;
	inst->alg.exit = aead_exit_geniv;

	inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);
	inst->alg.base.cra_ctxsize += inst->alg.ivsize;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	aead_geniv_free(inst);
	goto out;
}

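/*
 * Template entry point: one "seqiv" name serves both worlds, so pick
 * the ablkcipher or AEAD constructor based on the requested type.
 */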
static int seqiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		err = seqiv_ablkcipher_create(tmpl, tb);
	else
		err = seqiv_aead_create(tmpl, tb);

	return err;
}

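/* Mirror of seqiv_create(): choose the matching free routine by type. */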
static void seqiv_free(struct crypto_instance *inst)
{
	if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		skcipher_geniv_free(inst);
	else
		aead_geniv_free(aead_instance(inst));
}

static struct crypto_template seqiv_tmpl = {
	.name = "seqiv",
	.create = seqiv_create,
	.free = seqiv_free,
	.module = THIS_MODULE,
};

static int __init seqiv_module_init(void)
{
	return crypto_register_template(&seqiv_tmpl);
}

static void __exit seqiv_module_exit(void)
{
	crypto_unregister_template(&seqiv_tmpl);
}

module_init(seqiv_module_init);
module_exit(seqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Sequence Number IV Generator");
MODULE_ALIAS_CRYPTO("seqiv");