// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator generates an IV based on a sequence number by xoring it
 * with a salt.  This algorithm is mainly useful for CTR and similar modes.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */
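
/*
 * Illustration (editorial note, not part of the original file): the
 * caller places a 64-bit sequence number in req->iv, and the IV that
 * actually accompanies the message is
 *
 *	wire_iv = seqno ^ salt
 *
 * where the salt is a per-key constant kept in the geniv context.  The
 * IV is therefore unique for as long as the sequence number does not
 * repeat.
 */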

#include <crypto/internal/geniv.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/string.h>

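/*
 * Completion helper: on success, copy the generated IV from the heap
 * copy held in the subrequest back into the caller's req->iv, then
 * zeroize and free the copy.  Only reached on the unaligned-IV path,
 * where subreq->iv points at memory allocated in seqiv_aead_encrypt().
 */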
static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = crypto_aead_reqtfm(req);
	memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kzfree(subreq->iv);
}

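/* Async callback wrapper: finish the IV handling, then complete the request. */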
static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
					int err)
{
	struct aead_request *req = base->data;

	seqiv_aead_encrypt_complete2(req, err);
	aead_request_complete(req, err);
}

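/*
 * Encrypt path.  req->iv carries the 64-bit sequence number; the
 * generated IV (sequence number XOR salt) is written into the output
 * between the associated data and the ciphertext, and the child AEAD
 * then encrypts everything after it in place on dst.
 */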
static int seqiv_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize = 8;
	int err;

	if (req->cryptlen < ivsize)
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->child);

	compl = req->base.complete;
	data = req->base.data;
	info = req->iv;

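	/*
	 * The child operates in place on dst below, so if source and
	 * destination differ, copy the data across first via the null
	 * cipher.
	 */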
	if (req->src != req->dst) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->sknull);

		skcipher_request_set_sync_tfm(nreq, ctx->sknull);
		skcipher_request_set_callback(nreq, req->base.flags,
					      NULL, NULL);
		skcipher_request_set_crypt(nreq, req->src, req->dst,
					   req->assoclen + req->cryptlen,
					   NULL);

		err = crypto_skcipher_encrypt(nreq);
		if (err)
			return err;
	}

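	/*
	 * If the caller's IV buffer does not satisfy the child's
	 * alignment requirement, work on a heap copy instead, and hook
	 * the completion callback so the result is copied back and the
	 * copy freed.
	 */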
	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_aead_alignmask(geniv) + 1))) {
		info = kmemdup(req->iv, ivsize, req->base.flags &
			       CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
			       GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		compl = seqiv_aead_encrypt_complete;
		data = req;
	}

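	/*
	 * The child encrypts everything past the stored IV; the IV slot
	 * is counted as associated data so the child skips over it in
	 * dst.
	 */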
	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->dst, req->dst,
			       req->cryptlen - ivsize, info);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

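	/* Generate the IV (seqno ^ salt) and store it in the output. */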
	crypto_xor(info, ctx->salt, ivsize);
	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(info != req->iv))
		seqiv_aead_encrypt_complete2(req, err);
	return err;
}

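/*
 * Decrypt path: the transmitted IV sits between the associated data and
 * the ciphertext, so pull it out into req->iv and hand the remainder to
 * the child AEAD.
 */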
static int seqiv_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct aead_geniv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;

	if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->child);

	compl = req->base.complete;
	data = req->base.data;

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

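	/* Extract the received IV from the head of the payload. */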
	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);

	return crypto_aead_decrypt(subreq);
}

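/*
 * Instantiate "seqiv" around a child AEAD.  The construction only makes
 * sense for a 64-bit IV, so anything else is rejected; extra context
 * space (one IV's worth) is reserved after struct aead_geniv_ctx to
 * hold the salt.
 */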
static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct aead_instance *inst;
	int err;

	inst = aead_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	err = -EINVAL;
	if (inst->alg.ivsize != sizeof(u64))
		goto free_inst;

	inst->alg.encrypt = seqiv_aead_encrypt;
	inst->alg.decrypt = seqiv_aead_decrypt;

	inst->alg.init = aead_init_geniv;
	inst->alg.exit = aead_exit_geniv;

	inst->alg.base.cra_ctxsize = sizeof(struct aead_geniv_ctx);
	inst->alg.base.cra_ctxsize += inst->alg.ivsize;

	err = aead_register_instance(tmpl, inst);
	if (err) {
free_inst:
		inst->free(inst);
	}
	return err;
}

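/* Template entry point: accept only AEAD instantiations of "seqiv". */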
static int seqiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		return -EINVAL;

	return seqiv_aead_create(tmpl, tb);
}

static struct crypto_template seqiv_tmpl = {
	.name = "seqiv",
	.create = seqiv_create,
	.module = THIS_MODULE,
};
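
/*
 * Usage sketch (illustrative, not taken from this file): the template
 * wraps an existing AEAD by name, so a user such as IPsec ESP might
 * request something like
 *
 *	aead = crypto_alloc_aead("seqiv(authenc(hmac(sha256),ctr(aes)))",
 *				 0, 0);
 *
 * after which seqiv supplies the per-message IV from the sequence
 * number placed in req->iv.
 */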

static int __init seqiv_module_init(void)
{
	return crypto_register_template(&seqiv_tmpl);
}

static void __exit seqiv_module_exit(void)
{
	crypto_unregister_template(&seqiv_tmpl);
}

subsys_initcall(seqiv_module_init);
module_exit(seqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Sequence Number IV Generator");
MODULE_ALIAS_CRYPTO("seqiv");