/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

#include "internal.h"

struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn mac;
};

struct crypto_ccm_ctx {
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
};

struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];
};

struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

struct crypto_ccm_req_priv_ctx {
	u8 odata[16];
	u8 auth_tag[16];
	u32 flags;
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct skcipher_request skreq;
};

struct cbcmac_tfm_ctx {
	struct crypto_cipher *child;
};

struct cbcmac_desc_ctx {
	unsigned int len;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}
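
/*
 * Worked example for set_msg_len() above (illustrative only): with
 * csize = 3 and msglen = 0x012345, the three octets written at the end
 * of the block are 01 23 45, i.e. the big-endian payload length placed
 * in the last L bytes of the B_0 block.  A msglen that does not fit in
 * L bytes is rejected with -EOVERFLOW.
 */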

static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_skcipher *ctr = ctx->ctr;
	struct crypto_ahash *mac = ctx->mac;
	int err = 0;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	crypto_aead_set_flags(aead, crypto_skcipher_get_flags(ctr) &
			      CRYPTO_TFM_RES_MASK);
	if (err)
		goto out;

	crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(mac, key, keylen);
	crypto_aead_set_flags(aead, crypto_ahash_get_flags(mac) &
			      CRYPTO_TFM_RES_MASK);

out:
	return err;
}

static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
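
/*
 * Per RFC 3610 and NIST SP 800-38C the CCM tag length t must be an
 * even value between 4 and 16 bytes; crypto_ccm_setauthsize() above
 * simply whitelists exactly those values.
 */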

static int format_input(u8 *info, struct aead_request *req,
			unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int lp = req->iv[0];
	unsigned int l = lp + 1;
	unsigned int m;

	m = crypto_aead_authsize(aead);

	memcpy(info, req->iv, 16);

	/* format control info per RFC 3610 and
	 * NIST Special Publication 800-38C
	 */
	*info |= (8 * ((m - 2) / 2));
	if (req->assoclen)
		*info |= 64;

	return set_msg_len(info + 16 - l, cryptlen, l);
}
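
/*
 * Sketch of the B_0 block built by format_input() above, assuming the
 * common AES-CCM case of a 13-byte nonce (L = 2, so iv[0] = L' = 1),
 * an 8-byte tag (M = 8) and some associated data:
 *
 *	byte 0		flags = Adata(0x40) | ((M - 2) / 2) << 3 | L' = 0x59
 *	bytes 1..13	nonce (copied in from req->iv)
 *	bytes 14..15	message length, big endian (via set_msg_len())
 */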

static int format_adata(u8 *adata, unsigned int a)
{
	int len = 0;

	/* add control info for associated data
	 * RFC 3610 and NIST Special Publication 800-38C
	 */
	if (a < 65280) {
		*(__be16 *)adata = cpu_to_be16(a);
		len = 2;
	} else {
		*(__be16 *)adata = cpu_to_be16(0xfffe);
		*(__be32 *)&adata[2] = cpu_to_be32(a);
		len = 6;
	}

	return len;
}
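
/*
 * Associated data length encoding used by format_adata() above, per
 * RFC 3610: lengths below 0xff00 are encoded as two big-endian bytes;
 * larger 32-bit lengths as the marker 0xfffe followed by four
 * big-endian bytes, e.g. a = 70000 (0x11170) -> ff fe 00 01 11 70.
 */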

static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	AHASH_REQUEST_ON_STACK(ahreq, ctx->mac);
	unsigned int assoclen = req->assoclen;
	struct scatterlist sg[3];
	u8 odata[16];
	u8 idata[16];
	int ilen, err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	sg_init_table(sg, 3);
	sg_set_buf(&sg[0], odata, 16);

	/* format associated data and compute into mac */
	if (assoclen) {
		ilen = format_adata(idata, assoclen);
		sg_set_buf(&sg[1], idata, ilen);
		sg_chain(sg, 3, req->src);
	} else {
		ilen = 0;
		sg_chain(sg, 2, req->src);
	}

	ahash_request_set_tfm(ahreq, ctx->mac);
	ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
	ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
	err = crypto_ahash_init(ahreq);
	if (err)
		goto out;
	err = crypto_ahash_update(ahreq);
	if (err)
		goto out;

	/* pad the MAC input out to a multiple of the block size */
	ilen = 16 - (assoclen + ilen) % 16;
	if (ilen < 16) {
		memset(idata, 0, ilen);
		sg_init_table(sg, 2);
		sg_set_buf(&sg[0], idata, ilen);
		if (plain)
			sg_chain(sg, 2, plain);
		plain = sg;
		cryptlen += ilen;
	}

	ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
	err = crypto_ahash_finup(ahreq);
out:
	return err;
}
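
/*
 * Shape of the CBC-MAC input assembled by crypto_ccm_auth() above:
 *
 *	B_0 (16 bytes) || adata length header (2 or 6 bytes) || A || pad || P
 *
 * The explicit zero pad brings the header-plus-associated-data part up
 * to a 16-byte boundary; a trailing partial block of the payload is
 * padded implicitly by the cbcmac final step.  The associated data and
 * payload are hashed directly out of the request scatterlists.
 */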

static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	u8 *odata = pctx->odata;

	if (!err)
		scatterwalk_map_and_copy(odata, req->dst,
					 req->assoclen + req->cryptlen,
					 crypto_aead_authsize(aead), 1);
	aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (1 > iv[0] || iv[0] > 7)
		return -EINVAL;

	return 0;
}

static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}
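
/*
 * crypto_ccm_init_crypt() above turns req->iv into the initial counter
 * block A_0 by zeroing its trailing iv[0] + 1 counter bytes, so the
 * first CTR keystream block (counter 0) is the one that encrypts the
 * authentication tag.  It also prepends a 16-byte slot for that tag to
 * the source (and, if different, destination) scatterlists, which is
 * why the skcipher below runs over cryptlen + 16 bytes.
 */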

static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_init_crypt(req, odata);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
	if (err)
		return err;

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_encrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
				 crypto_aead_authsize(aead), 1);
	return err;
}
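
/*
 * Illustrative caller-side sketch of driving the "ccm(aes)" instance
 * built here through the generic AEAD API; error handling is omitted
 * and the scatterlist/IV setup is assumed to be done by the caller
 * (iv is the 16-byte block with iv[0] = L'):
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 8);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, sg, sg, plaintext_len, iv);
 *	crypto_aead_encrypt(req);	/* appends the 8-byte tag to dst */
 */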

static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
				    int err)
{
	struct aead_request *req = areq->data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct scatterlist *dst;

	pctx->flags = 0;

	dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

	if (!err) {
		err = crypto_ccm_auth(req, dst, cryptlen);
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}

static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	cryptlen -= authsize;

	err = crypto_ccm_init_crypt(req, authtag);
	if (err)
		return err;

	scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
				 authsize, 0);

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_decrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_decrypt(skreq);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
	if (err)
		return err;

	/* verify */
	if (crypto_memneq(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}
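
/*
 * Decryption above runs the CTR pass first: the received tag is copied
 * out of the source, then the tag slot plus ciphertext are decrypted
 * in one go (the tag slot consumes counter block 0).  The CBC-MAC is
 * then recomputed over the recovered plaintext and compared with
 * crypto_memneq(), a constant-time comparison, so tag mismatches do
 * not leak timing information.
 */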

static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
	unsigned long align;
	int err;

	mac = crypto_spawn_ahash(&ictx->mac);
	if (IS_ERR(mac))
		return PTR_ERR(mac);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_mac;

	ctx->mac = mac;
	ctx->ctr = ctr;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		align + sizeof(struct crypto_ccm_req_priv_ctx) +
		crypto_skcipher_reqsize(ctr));

	return 0;

err_free_mac:
	crypto_free_ahash(mac);
	return err;
}

static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->mac);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_ccm_free(struct aead_instance *inst)
{
	struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_ahash(&ctx->mac);
	crypto_drop_skcipher(&ctx->ctr);
	kfree(inst);
}

static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *full_name,
				    const char *ctr_name,
				    const char *mac_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *mac_alg;
	struct hash_alg_common *mac;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mac_alg = crypto_find_alg(mac_name, &crypto_ahash_type,
				  CRYPTO_ALG_TYPE_HASH,
				  CRYPTO_ALG_TYPE_AHASH_MASK |
				  CRYPTO_ALG_ASYNC);
	if (IS_ERR(mac_alg))
		return PTR_ERR(mac_alg);

	mac = __crypto_hash_alg_common(mac_alg);
	err = -EINVAL;
	if (mac->digestsize != 16)
		goto out_put_mac;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_mac;

	ictx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ictx->mac, mac,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_mac;

	ctr = crypto_spawn_skcipher_alg(&ictx->ctr);

	/* Not a stream cipher? */
	err = -EINVAL;
	if (ctr->base.cra_blocksize != 1)
		goto err_drop_ctr;

	/* We want the real thing! */
	if (crypto_skcipher_alg_ivsize(ctr) != 16)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
		     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (mac->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = mac->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.ivsize = 16;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;

	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_ctr;

out_put_mac:
	crypto_mod_put(mac_alg);
	return err;

err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_mac:
	crypto_drop_ahash(&ictx->mac);
err_free_inst:
	kfree(inst);
	goto out_put_mac;
}
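
/*
 * Example of the template plumbing above: requesting "ccm(aes)" ends
 * up in crypto_ccm_create(), which expands the name to the inner
 * algorithms "ctr(aes)" and "cbcmac(aes)" and then builds an AEAD
 * instance whose driver name is "ccm_base(ctr(aes),cbcmac(aes))".
 */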

static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char mac_name[CRYPTO_MAX_ALG_NAME];
	char full_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
					mac_name);
}

static struct crypto_template crypto_ccm_tmpl = {
	.name = "ccm",
	.create = crypto_ccm_create,
	.module = THIS_MODULE,
};

static int crypto_ccm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *cipher_name;
	char full_name[CRYPTO_MAX_ALG_NAME];

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	cipher_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
		     ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
					cipher_name);
}

static struct crypto_template crypto_ccm_base_tmpl = {
	.name = "ccm_base",
	.create = crypto_ccm_base_create,
	.module = THIS_MODULE,
};

static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;
	int err;

	if (keylen < 3)
		return -EINVAL;

	keylen -= 3;
	memcpy(ctx->nonce, key + keylen, 3);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(child, key, keylen);
	crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
				      CRYPTO_TFM_RES_MASK);

	return err;
}

static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}

static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;

	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}
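
/*
 * RFC 4309 (CCM for IPsec ESP) nonce construction performed above:
 *
 *	iv[0]		= 3		L' = 3, i.e. L = 4, an 11-byte nonce
 *	iv[1..3]	= salt		the last 3 bytes of the key (ctx->nonce)
 *	iv[4..11]	= req->iv	the 8-byte per-packet IV
 *
 * The 8 IV bytes also sit at the tail of the associated data, which is
 * why the encrypt/decrypt entry points below only accept assoclen of
 * 16 or 20 and the inner request is handed assoclen - 8 bytes of AAD.
 */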

static int crypto_rfc4309_encrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4309_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 32);

	return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4309_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}

static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = 8;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = 16;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;

	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;

	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_rfc4309_tmpl = {
	.name = "rfc4309",
	.create = crypto_rfc4309_create,
	.module = THIS_MODULE,
};

static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
				       const u8 *inkey, unsigned int keylen)
{
	struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);

	return crypto_cipher_setkey(ctx->child, inkey, keylen);
}

static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	int bs = crypto_shash_digestsize(pdesc->tfm);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(pdesc->tfm) - bs;

	ctx->len = 0;
	memset(dg, 0, bs);

	return 0;
}

static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				       unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	while (len > 0) {
		unsigned int l = min(len, bs - ctx->len);

		crypto_xor(dg + ctx->len, p, l);
		ctx->len += l;
		len -= l;
		p += l;

		if (ctx->len == bs) {
			crypto_cipher_encrypt_one(tfm, dg, dg);
			ctx->len = 0;
		}
	}

	return 0;
}

static int crypto_cbcmac_digest_final(struct shash_desc *pdesc, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct cbcmac_desc_ctx *ctx = shash_desc_ctx(pdesc);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = (u8 *)ctx + crypto_shash_descsize(parent) - bs;

	if (ctx->len)
		crypto_cipher_encrypt_one(tfm, dg, dg);

	memcpy(out, dg, bs);
	return 0;
}
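
/*
 * The cbcmac shash above is plain CBC-MAC over the underlying block
 * cipher E_K: starting from an all-zero state it computes
 *
 *	dg = E_K(dg ^ block)
 *
 * for every full block of input, and the final step folds in any
 * trailing partial block the same way before returning dg as the
 * digest.  No length padding is done here; CCM's B_0 block encodes
 * the message length, which is what makes this safe in this file.
 */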

static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH);
	if (err)
		return err;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	inst = shash_alloc_instance("cbcmac", alg);
	err = PTR_ERR(inst);
	if (IS_ERR(inst))
		goto out_put_alg;

	err = crypto_init_spawn(shash_instance_ctx(inst), alg,
				shash_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = 1;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = ALIGN(sizeof(struct cbcmac_desc_ctx),
				   alg->cra_alignmask + 1) +
			     alg->cra_blocksize;

	inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
	inst->alg.base.cra_init = cbcmac_init_tfm;
	inst->alg.base.cra_exit = cbcmac_exit_tfm;

	inst->alg.init = crypto_cbcmac_digest_init;
	inst->alg.update = crypto_cbcmac_digest_update;
	inst->alg.final = crypto_cbcmac_digest_final;
	inst->alg.setkey = crypto_cbcmac_digest_setkey;

	err = shash_register_instance(tmpl, inst);

out_free_inst:
	if (err)
		shash_free_instance(shash_crypto_instance(inst));

out_put_alg:
	crypto_mod_put(alg);
	return err;
}

static struct crypto_template crypto_cbcmac_tmpl = {
	.name = "cbcmac",
	.create = cbcmac_create,
	.free = shash_free_instance,
	.module = THIS_MODULE,
};

static int __init crypto_ccm_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_cbcmac_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_ccm_base_tmpl);
	if (err)
		goto out_undo_cbcmac;

	err = crypto_register_template(&crypto_ccm_tmpl);
	if (err)
		goto out_undo_base;

	err = crypto_register_template(&crypto_rfc4309_tmpl);
	if (err)
		goto out_undo_ccm;

out:
	return err;

out_undo_ccm:
	crypto_unregister_template(&crypto_ccm_tmpl);
out_undo_base:
	crypto_unregister_template(&crypto_ccm_base_tmpl);
out_undo_cbcmac:
	crypto_unregister_template(&crypto_cbcmac_tmpl);
	goto out;
}

static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc4309_tmpl);
	crypto_unregister_template(&crypto_ccm_tmpl);
	crypto_unregister_template(&crypto_ccm_base_tmpl);
	crypto_unregister_template(&crypto_cbcmac_tmpl);
}

module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");