/*
 * authencesn.c - AEAD wrapper for IPsec with extended sequence numbers,
 *                 derived from authenc.c
 *
 * Copyright (C) 2010 secunet Security Networks AG
 * Copyright (C) 2010 Steffen Klassert <steffen.klassert@secunet.com>
 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/authenc.h>
#include <crypto/null.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

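/*
 * Per-instance context: spawns for the two algorithms named as template
 * parameters, an ahash for authentication and an skcipher for encryption.
 */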
struct authenc_esn_instance_ctx {
	struct crypto_ahash_spawn auth;
	struct crypto_skcipher_spawn enc;
};

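/*
 * Per-transform context.  @reqoff is the offset of the inner request within
 * the request context tail; @null is the default null skcipher used to copy
 * data between distinct source and destination buffers.
 */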
struct crypto_authenc_esn_ctx {
	unsigned int reqoff;
	struct crypto_ahash *auth;
	struct crypto_skcipher *enc;
	struct crypto_sync_skcipher *null;
};

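/*
 * Per-request context.  The flexible tail starts with an alignment-padded
 * area for the computed digest (and, on decryption, the received one),
 * followed at @reqoff by the inner ahash_request or skcipher_request.
 */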
struct authenc_esn_request_ctx {
	struct scatterlist src[2];
	struct scatterlist dst[2];
	char tail[];
};

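/* Complete the AEAD request unless the inner operation is still in flight. */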
static void authenc_esn_request_complete(struct aead_request *req, int err)
{
	if (err != -EINPROGRESS)
		aead_request_complete(req, err);
}

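/*
 * An authsize of zero is allowed (the ICV is then neither generated nor
 * checked); any non-zero ICV must be at least four bytes.
 */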
static int crypto_authenc_esn_setauthsize(struct crypto_aead *authenc_esn,
					  unsigned int authsize)
{
	if (authsize > 0 && authsize < 4)
		return -EINVAL;

	return 0;
}

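/*
 * The key is the authenc-style blob from <crypto/authenc.h>: an rtattr
 * parameter carrying the encryption key length, followed by the
 * authentication key and then the encryption key.  Split it with
 * crypto_authenc_extractkeys() and program the two halves into the
 * underlying ahash and skcipher.
 */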
static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *key,
				     unsigned int keylen)
{
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	struct crypto_skcipher *enc = ctx->enc;
	struct crypto_authenc_keys keys;
	int err = -EINVAL;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc_esn) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
	crypto_aead_set_flags(authenc_esn, crypto_ahash_get_flags(auth) &
					   CRYPTO_TFM_RES_MASK);

	if (err)
		goto out;

	crypto_skcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(enc, crypto_aead_get_flags(authenc_esn) &
					 CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);
	crypto_aead_set_flags(authenc_esn, crypto_skcipher_get_flags(enc) &
					   CRYPTO_TFM_RES_MASK);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;

badkey:
	crypto_aead_set_flags(authenc_esn, CRYPTO_TFM_RES_BAD_KEY_LEN);
	goto out;
}

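/*
 * Second half of ICV generation: move the high-order sequence number bits
 * back into the header (undoing crypto_authenc_esn_genicv()'s reordering)
 * and copy the freshly computed digest into place behind the ciphertext.
 */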
static int crypto_authenc_esn_genicv_tail(struct aead_request *req,
					  unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_ahash *auth = ctx->auth;
	u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
			     crypto_ahash_alignmask(auth) + 1);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *dst = req->dst;
	u32 tmp[2];

	/* Move high-order bits of sequence number back. */
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);

	scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);
	return 0;
}

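/* Asynchronous completion of the ICV digest on the encrypt path. */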
static void authenc_esn_geniv_ahash_done(struct crypto_async_request *areq,
					 int err)
{
	struct aead_request *req = areq->data;

	err = err ?: crypto_authenc_esn_genicv_tail(req, 0);
	aead_request_complete(req, err);
}

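/*
 * Generate the ICV.  The caller lays out the associated data with the
 * high-order sequence number bits in bytes 4-7 (after the SPI in the IPsec
 * ESP case); per the RFC 4303 ESN construction those bits are authenticated
 * as if appended to the payload.  So stash them right behind the ciphertext,
 * move the leading four bytes up into their place and digest everything from
 * offset 4 onwards, i.e. SPI || seq_lo || payload || seq_hi.
 */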
static int crypto_authenc_esn_genicv(struct aead_request *req,
				     unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
			     crypto_ahash_alignmask(auth) + 1);
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *dst = req->dst;
	u32 tmp[2];

	if (!authsize)
		return 0;

	/* Move high-order bits of sequence number to the end. */
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);

	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);

	ahash_request_set_tfm(ahreq, auth);
	ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);
	ahash_request_set_callback(ahreq, flags,
				   authenc_esn_geniv_ahash_done, req);

	return crypto_ahash_digest(ahreq) ?:
	       crypto_authenc_esn_genicv_tail(req, aead_request_flags(req));
}

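/* Asynchronous completion of encryption: generate the ICV, then finish. */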
static void crypto_authenc_esn_encrypt_done(struct crypto_async_request *req,
					    int err)
{
	struct aead_request *areq = req->data;

	if (!err)
		err = crypto_authenc_esn_genicv(areq, 0);

	authenc_esn_request_complete(areq, err);
}

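/*
 * Copy @len bytes from req->src to req->dst through the null skcipher.
 * Needed when the caller supplies distinct buffers, since both the digest
 * and the in-place cipher operations work on the destination.
 */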
static int crypto_authenc_esn_copy(struct aead_request *req, unsigned int len)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);

	skcipher_request_set_sync_tfm(skreq, ctx->null);
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      NULL, NULL);
	skcipher_request_set_crypt(skreq, req->src, req->dst, len, NULL);

	return crypto_skcipher_encrypt(skreq);
}

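/*
 * Encrypt-then-MAC: copy the associated data across if src and dst differ,
 * encrypt the payload into dst and finally compute the ICV over the
 * reordered result in dst.
 */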
static int crypto_authenc_esn_encrypt(struct aead_request *req)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
						  ctx->reqoff);
	struct crypto_skcipher *enc = ctx->enc;
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	struct scatterlist *src, *dst;
	int err;

	sg_init_table(areq_ctx->src, 2);
	src = scatterwalk_ffwd(areq_ctx->src, req->src, assoclen);
	dst = src;

	if (req->src != req->dst) {
		err = crypto_authenc_esn_copy(req, assoclen);
		if (err)
			return err;

		sg_init_table(areq_ctx->dst, 2);
		dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, assoclen);
	}

	skcipher_request_set_tfm(skreq, enc);
	skcipher_request_set_callback(skreq, aead_request_flags(req),
				      crypto_authenc_esn_encrypt_done, req);
	skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);

	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	return crypto_authenc_esn_genicv(req, aead_request_flags(req));
}

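/*
 * Second half of decryption: restore the original header layout, compare
 * the received ICV against the computed one (constant time, via
 * crypto_memneq()) and, only if they match, decrypt the payload in place
 * in dst.
 */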
static int crypto_authenc_esn_decrypt_tail(struct aead_request *req,
					   unsigned int flags)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
						  ctx->reqoff);
	struct crypto_ahash *auth = ctx->auth;
	u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
			      crypto_ahash_alignmask(auth) + 1);
	unsigned int cryptlen = req->cryptlen - authsize;
	unsigned int assoclen = req->assoclen;
	struct scatterlist *dst = req->dst;
	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
	u32 tmp[2];

	if (!authsize)
		goto decrypt;

	/* Move high-order bits of sequence number back. */
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);

	if (crypto_memneq(ihash, ohash, authsize))
		return -EBADMSG;

decrypt:

	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, assoclen);

	skcipher_request_set_tfm(skreq, ctx->enc);
	skcipher_request_set_callback(skreq, flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv);

	return crypto_skcipher_decrypt(skreq);
}

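/* Asynchronous completion of the ICV digest on the decrypt path. */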
static void authenc_esn_verify_ahash_done(struct crypto_async_request *areq,
					  int err)
{
	struct aead_request *req = areq->data;

	err = err ?: crypto_authenc_esn_decrypt_tail(req, 0);
	authenc_esn_request_complete(req, err);
}

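/*
 * MAC-then-decrypt: copy everything except the ICV into dst if src and dst
 * differ, save the received ICV, apply the ESN reordering and digest the
 * result, then let crypto_authenc_esn_decrypt_tail() verify and decrypt.
 */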
static int crypto_authenc_esn_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
	unsigned int authsize = crypto_aead_authsize(authenc_esn);
	struct crypto_ahash *auth = ctx->auth;
	u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
			      crypto_ahash_alignmask(auth) + 1);
	unsigned int assoclen = req->assoclen;
	unsigned int cryptlen = req->cryptlen;
	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
	struct scatterlist *dst = req->dst;
	u32 tmp[2];
	int err;

	cryptlen -= authsize;

	if (req->src != dst) {
		err = crypto_authenc_esn_copy(req, assoclen + cryptlen);
		if (err)
			return err;
	}

	scatterwalk_map_and_copy(ihash, req->src, assoclen + cryptlen,
				 authsize, 0);

	if (!authsize)
		goto tail;

	/* Move high-order bits of sequence number to the end. */
	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);

	sg_init_table(areq_ctx->dst, 2);
	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);

	ahash_request_set_tfm(ahreq, auth);
	ahash_request_set_crypt(ahreq, dst, ohash, assoclen + cryptlen);
	ahash_request_set_callback(ahreq, aead_request_flags(req),
				   authenc_esn_verify_ahash_done, req);

	err = crypto_ahash_digest(ahreq);
	if (err)
		return err;

tail:
	return crypto_authenc_esn_decrypt_tail(req, aead_request_flags(req));
}

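/*
 * Transform init: instantiate the underlying ahash, skcipher and null
 * copier, then size the per-request context so that the aligned digest area
 * plus the larger of the two inner requests fits in the tail.
 */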
static int crypto_authenc_esn_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct authenc_esn_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *auth;
	struct crypto_skcipher *enc;
	struct crypto_sync_skcipher *null;
	int err;

	auth = crypto_spawn_ahash(&ictx->auth);
	if (IS_ERR(auth))
		return PTR_ERR(auth);

	enc = crypto_spawn_skcipher(&ictx->enc);
	err = PTR_ERR(enc);
	if (IS_ERR(enc))
		goto err_free_ahash;

	null = crypto_get_default_null_skcipher();
	err = PTR_ERR(null);
	if (IS_ERR(null))
		goto err_free_skcipher;

	ctx->auth = auth;
	ctx->enc = enc;
	ctx->null = null;

	ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth),
			    crypto_ahash_alignmask(auth) + 1);

	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct authenc_esn_request_ctx) +
		ctx->reqoff +
		max_t(unsigned int,
		      crypto_ahash_reqsize(auth) +
		      sizeof(struct ahash_request),
		      sizeof(struct skcipher_request) +
		      crypto_skcipher_reqsize(enc)));

	return 0;

err_free_skcipher:
	crypto_free_skcipher(enc);
err_free_ahash:
	crypto_free_ahash(auth);
	return err;
}

static void crypto_authenc_esn_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->auth);
	crypto_free_skcipher(ctx->enc);
	crypto_put_default_null_skcipher();
}

static void crypto_authenc_esn_free(struct aead_instance *inst)
{
	struct authenc_esn_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->enc);
	crypto_drop_ahash(&ctx->auth);
	kfree(inst);
}

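/*
 * Template constructor.  tb[1] names the hash, tb[2] the cipher, yielding
 * instances such as "authencesn(hmac(sha1),cbc(aes))".  As a rough usage
 * sketch (the algorithm names here are just an example), a caller would
 * obtain a handle with
 *
 *	aead = crypto_alloc_aead("authencesn(hmac(sha1),cbc(aes))", 0, 0);
 *
 * which makes the crypto API invoke this function to build the instance if
 * it is not already registered.
 */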
static int crypto_authenc_esn_create(struct crypto_template *tmpl,
				     struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct hash_alg_common *auth;
	struct crypto_alg *auth_base;
	struct skcipher_alg *enc;
	struct authenc_esn_instance_ctx *ctx;
	const char *enc_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
			      CRYPTO_ALG_TYPE_AHASH_MASK |
			      crypto_requires_sync(algt->type, algt->mask));
	if (IS_ERR(auth))
		return PTR_ERR(auth);

	auth_base = &auth->base;

	enc_name = crypto_attr_alg_name(tb[2]);
	err = PTR_ERR(enc_name);
	if (IS_ERR(enc_name))
		goto out_put_auth;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_auth;

	ctx = aead_instance_ctx(inst);

	err = crypto_init_ahash_spawn(&ctx->auth, auth,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ctx->enc, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_auth;

	enc = crypto_spawn_skcipher_alg(&ctx->enc);

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "authencesn(%s,%s)", auth_base->cra_name,
		     enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_enc;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "authencesn(%s,%s)", auth_base->cra_driver_name,
		     enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_enc;

	inst->alg.base.cra_flags = (auth_base->cra_flags |
				    enc->base.cra_flags) & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
				      auth_base->cra_priority;
	inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
	inst->alg.base.cra_alignmask = auth_base->cra_alignmask |
				       enc->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_authenc_esn_ctx);

	inst->alg.ivsize = crypto_skcipher_alg_ivsize(enc);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(enc);
	inst->alg.maxauthsize = auth->digestsize;

	inst->alg.init = crypto_authenc_esn_init_tfm;
	inst->alg.exit = crypto_authenc_esn_exit_tfm;

	inst->alg.setkey = crypto_authenc_esn_setkey;
	inst->alg.setauthsize = crypto_authenc_esn_setauthsize;
	inst->alg.encrypt = crypto_authenc_esn_encrypt;
	inst->alg.decrypt = crypto_authenc_esn_decrypt;

	inst->free = crypto_authenc_esn_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_enc;

out:
	crypto_mod_put(auth_base);
	return err;

err_drop_enc:
	crypto_drop_skcipher(&ctx->enc);
err_drop_auth:
	crypto_drop_ahash(&ctx->auth);
err_free_inst:
	kfree(inst);
out_put_auth:
	goto out;
}

static struct crypto_template crypto_authenc_esn_tmpl = {
	.name = "authencesn",
	.create = crypto_authenc_esn_create,
	.module = THIS_MODULE,
};

static int __init crypto_authenc_esn_module_init(void)
{
	return crypto_register_template(&crypto_authenc_esn_tmpl);
}

static void __exit crypto_authenc_esn_module_exit(void)
{
	crypto_unregister_template(&crypto_authenc_esn_tmpl);
}

subsys_initcall(crypto_authenc_esn_module_init);
module_exit(crypto_authenc_esn_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("AEAD wrapper for IPsec with extended sequence numbers");
MODULE_ALIAS_CRYPTO("authencesn");