xref: /openbmc/linux/crypto/pcrypt.c (revision aad7ebb5)
// SPDX-License-Identifier: GPL-2.0-only
/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/kobject.h>
#include <linux/cpu.h>
#include <crypto/pcrypt.h>

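/*
 * One padata instance for the encryption path and one for the decryption
 * path, plus the kset that exposes both under /sys/kernel/pcrypt/.
 */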
static struct padata_instance *pencrypt;
static struct padata_instance *pdecrypt;
static struct kset           *pcrypt_kset;

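/*
 * Per-instance context: the spawn holding the wrapped AEAD algorithm and a
 * counter used to spread new transforms round-robin across callback CPUs.
 */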
struct pcrypt_instance_ctx {
	struct crypto_aead_spawn spawn;
	atomic_t tfm_count;
};

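/*
 * Per-transform context: the underlying (child) AEAD and the CPU on which
 * the serial completion callback for this transform is run.
 */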
struct pcrypt_aead_ctx {
	struct crypto_aead *child;
	unsigned int cb_cpu;
};

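/* setkey and setauthsize are simply forwarded to the child AEAD. */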
static int pcrypt_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setkey(ctx->child, key, keylen);
}

static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setauthsize(ctx->child, authsize);
}

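/*
 * Serial callback, invoked by padata on the callback CPU once the request's
 * turn in the original submission order comes up: complete the original
 * request with the status recorded in padata->info.
 */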
static void pcrypt_aead_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->base.data, padata->info);
}

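/*
 * Completion callback of the child request when the child operation finished
 * asynchronously: record the result and hand the request back to padata for
 * in-order completion.
 */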
static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct padata_priv *padata = pcrypt_request_padata(preq);

	padata->info = err;
	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	padata_do_serial(padata);
}

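/*
 * Parallel worker, run by padata on one of the parallel worker CPUs: perform
 * the actual child encryption.  If the child completes asynchronously,
 * serialization is deferred to pcrypt_aead_done().
 */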
static void pcrypt_aead_enc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_encrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

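/*
 * Encrypt entry point: build the child request inside the pcrypt request
 * context and queue it on the pencrypt padata instance.  On success the
 * request completes asynchronously, so -EINPROGRESS is returned.
 */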
static int pcrypt_aead_encrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_enc;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = padata_do_parallel(pencrypt, padata, &ctx->cb_cpu);
	if (!err)
		return -EINPROGRESS;

	return err;
}

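/* Parallel worker for the decrypt path; mirrors pcrypt_aead_enc(). */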
static void pcrypt_aead_dec(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_decrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

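/*
 * Decrypt entry point; identical to pcrypt_aead_encrypt() except that the
 * request is queued on the pdecrypt padata instance.
 */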
static int pcrypt_aead_decrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_dec;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = padata_do_parallel(pdecrypt, padata, &ctx->cb_cpu);
	if (!err)
		return -EINPROGRESS;

	return err;
}

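/*
 * Transform init: pick a callback CPU round-robin over the online CPUs,
 * instantiate the child AEAD and size the request context so that it can
 * hold the pcrypt bookkeeping plus the child's request.
 */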
static int pcrypt_aead_init_tfm(struct crypto_aead *tfm)
{
	int cpu, cpu_index;
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct pcrypt_instance_ctx *ictx = aead_instance_ctx(inst);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cpu_index = (unsigned int)atomic_inc_return(&ictx->tfm_count) %
		    cpumask_weight(cpu_online_mask);

	ctx->cb_cpu = cpumask_first(cpu_online_mask);
	for (cpu = 0; cpu < cpu_index; cpu++)
		ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);

	cipher = crypto_spawn_aead(&ictx->spawn);

	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(tfm, sizeof(struct pcrypt_request) +
				     sizeof(struct aead_request) +
				     crypto_aead_reqsize(cipher));

	return 0;
}

static void pcrypt_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void pcrypt_free(struct aead_instance *inst)
{
	struct pcrypt_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->spawn);
	kfree(inst);
}

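/*
 * Derive the instance's cra_* fields from the wrapped algorithm.  The
 * priority is raised by 100 so that, once instantiated, lookups of the
 * wrapped algorithm's cra_name prefer the pcrypt instance.
 */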
static int pcrypt_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 100;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

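/*
 * Build a pcrypt AEAD instance around the algorithm named in the template
 * parameters: grab the underlying AEAD, inherit its geometry (IV size,
 * maximum authentication tag size) and wire up the pcrypt operations.
 */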
static int pcrypt_create_aead(struct crypto_template *tmpl, struct rtattr **tb,
			      u32 type, u32 mask)
{
	struct pcrypt_instance_ctx *ctx;
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	crypto_set_aead_spawn(&ctx->spawn, aead_crypto_instance(inst));

	err = crypto_grab_aead(&ctx->spawn, name, 0, 0);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->spawn);
	err = pcrypt_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

	inst->alg.init = pcrypt_aead_init_tfm;
	inst->alg.exit = pcrypt_aead_exit_tfm;

	inst->alg.setkey = pcrypt_aead_setkey;
	inst->alg.setauthsize = pcrypt_aead_setauthsize;
	inst->alg.encrypt = pcrypt_aead_encrypt;
	inst->alg.decrypt = pcrypt_aead_decrypt;

	inst->free = pcrypt_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_aead;

out:
	return err;

out_drop_aead:
	crypto_drop_aead(&ctx->spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static int pcrypt_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		return pcrypt_create_aead(tmpl, tb, algt->type, algt->mask);
	}

	return -EINVAL;
}

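/*
 * Hook a padata instance's kobject into the pcrypt kset so that it appears
 * under /sys/kernel/pcrypt/<name> together with its padata attributes.
 */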
static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
{
	int ret;

	pinst->kobj.kset = pcrypt_kset;
	ret = kobject_add(&pinst->kobj, NULL, "%s", name);
	if (!ret)
		kobject_uevent(&pinst->kobj, KOBJ_ADD);

	return ret;
}

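/*
 * Allocate a padata instance that may use all possible CPUs and expose it
 * in sysfs; if the sysfs step fails, the instance is freed again.
 */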
static int pcrypt_init_padata(struct padata_instance **pinst, const char *name)
{
	int ret = -ENOMEM;

	*pinst = padata_alloc_possible(name);
	if (!*pinst)
		return ret;

	ret = pcrypt_sysfs_add(*pinst, name);
	if (ret)
		padata_free(*pinst);

	return ret;
}

static void pcrypt_fini_padata(struct padata_instance *pinst)
{
	padata_stop(pinst);
	padata_free(pinst);
}

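/*
 * Template registration.  An instance is typically created by requesting the
 * wrapped algorithm through the "pcrypt" template, e.g.
 *
 *	modprobe tcrypt alg="pcrypt(rfc4106(gcm(aes)))" type=3
 *
 * after which lookups of the wrapped algorithm prefer the pcrypt instance
 * because of its raised priority (see pcrypt_init_instance()).
 */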
static struct crypto_template pcrypt_tmpl = {
	.name = "pcrypt",
	.create = pcrypt_create,
	.module = THIS_MODULE,
};

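/*
 * Module init: create the /sys/kernel/pcrypt kset, set up and start the
 * pencrypt and pdecrypt padata instances, then register the template.
 */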
static int __init pcrypt_init(void)
{
	int err = -ENOMEM;

	pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
	if (!pcrypt_kset)
		goto err;

	err = pcrypt_init_padata(&pencrypt, "pencrypt");
	if (err)
		goto err_unreg_kset;

	err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
	if (err)
		goto err_deinit_pencrypt;

	padata_start(pencrypt);
	padata_start(pdecrypt);

	return crypto_register_template(&pcrypt_tmpl);

err_deinit_pencrypt:
	pcrypt_fini_padata(pencrypt);
err_unreg_kset:
	kset_unregister(pcrypt_kset);
err:
	return err;
}

static void __exit pcrypt_exit(void)
{
	/*
	 * Unregister the template before tearing down the padata instances;
	 * otherwise the pcrypt algorithms could still be used while the
	 * padata machinery they depend on is being freed.
	 */
	crypto_unregister_template(&pcrypt_tmpl);

	pcrypt_fini_padata(pencrypt);
	pcrypt_fini_padata(pdecrypt);

	kset_unregister(pcrypt_kset);
}

subsys_initcall(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");
MODULE_ALIAS_CRYPTO("pcrypt");