xref: /openbmc/linux/crypto/gcm.c (revision cd900f0cacd7601dabdd028e8cbdbf2a7041cee2)
1 // SPDX-License-Identifier: GPL-2.0-only
2 /*
3  * GCM: Galois/Counter Mode.
4  *
5  * Copyright (c) 2007 Nokia Siemens Networks - Mikko Herranen <mh1@iki.fi>
6  */
7 
8 #include <crypto/gf128mul.h>
9 #include <crypto/internal/aead.h>
10 #include <crypto/internal/skcipher.h>
11 #include <crypto/internal/hash.h>
12 #include <crypto/null.h>
13 #include <crypto/scatterwalk.h>
14 #include <crypto/gcm.h>
15 #include <crypto/hash.h>
16 #include "internal.h"
17 #include <linux/err.h>
18 #include <linux/init.h>
19 #include <linux/kernel.h>
20 #include <linux/module.h>
21 #include <linux/slab.h>
22 
/* Per-instance context for "gcm(...)": the two child algorithm spawns. */
struct gcm_instance_ctx {
	struct crypto_skcipher_spawn ctr;	/* CTR-mode stream cipher */
	struct crypto_ahash_spawn ghash;	/* GHASH keyed hash */
};

/* Per-transform context for "gcm(...)": instantiated child transforms. */
struct crypto_gcm_ctx {
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
};

/* Per-transform context for the rfc4106 wrapper. */
struct crypto_rfc4106_ctx {
	struct crypto_aead *child;	/* inner gcm(...) transform */
	u8 nonce[4];			/* salt taken from the last 4 key bytes */
};

/* Per-request context for rfc4106: rebuilt scatterlists + child request. */
struct crypto_rfc4106_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;	/* must be last: child ctx follows */
};

/* Per-instance context for the rfc4543 (GMAC) wrapper. */
struct crypto_rfc4543_instance_ctx {
	struct crypto_aead_spawn aead;
};

/* Per-transform context for rfc4543. */
struct crypto_rfc4543_ctx {
	struct crypto_aead *child;		/* inner gcm(...) transform */
	struct crypto_sync_skcipher *null;	/* null cipher used to copy src to dst */
	u8 nonce[4];				/* salt from the last 4 key bytes */
};

/* Per-request context for rfc4543. */
struct crypto_rfc4543_req_ctx {
	struct aead_request subreq;	/* must be last: child ctx follows */
};

/* State carried across the asynchronous GHASH continuation chain. */
struct crypto_gcm_ghash_ctx {
	unsigned int cryptlen;		/* bytes of ciphertext to hash */
	struct scatterlist *src;	/* ciphertext scatterlist to hash */
	int (*complete)(struct aead_request *req, u32 flags);	/* final step */
};

/* Private per-request context for the core gcm(...) implementation. */
struct crypto_gcm_req_priv_ctx {
	u8 iv[16];		/* 12-byte IV || 32-bit counter */
	u8 auth_tag[16];	/* E_K(Y0), later xor'd into the final tag */
	u8 iauth_tag[16];	/* GHASH output */
	struct scatterlist src[3];	/* auth_tag block chained before payload */
	struct scatterlist dst[3];
	struct scatterlist sg;
	struct crypto_gcm_ghash_ctx ghash_ctx;
	/* Child requests share storage; only one is in flight at a time. */
	union {
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	} u;
};

/* Shared all-zero block used to pad GHASH input to 16-byte boundaries. */
static struct {
	u8 buf[16];
	struct scatterlist sg;
} *gcm_zeroes;

static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc);
84 
85 static inline struct crypto_gcm_req_priv_ctx *crypto_gcm_reqctx(
86 	struct aead_request *req)
87 {
88 	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
89 
90 	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
91 }
92 
/*
 * Set the GCM key: key the CTR child, then derive the GHASH subkey
 * H = E_K(0^128) by running CTR over an all-zero block with an all-zero
 * IV (the whole @data buffer is zeroed by kzalloc), and key GHASH with it.
 */
static int crypto_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ahash *ghash = ctx->ghash;
	struct crypto_skcipher *ctr = ctx->ctr;
	struct {
		be128 hash;
		u8 iv[16];

		struct crypto_wait wait;

		struct scatterlist sg[1];
		struct skcipher_request req;	/* must be last: ctr ctx follows */
	} *data;
	int err;

	/* Propagate request flags (e.g. weak-key policy) to the child. */
	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	if (err)
		return err;

	data = kzalloc(sizeof(*data) + crypto_skcipher_reqsize(ctr),
		       GFP_KERNEL);
	if (!data)
		return -ENOMEM;

	crypto_init_wait(&data->wait);
	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
	skcipher_request_set_tfm(&data->req, ctr);
	skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
						  CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done,
				      &data->wait);
	skcipher_request_set_crypt(&data->req, data->sg, data->sg,
				   sizeof(data->hash), data->iv);

	/* Synchronously wait: setkey must not return before H is derived. */
	err = crypto_wait_req(crypto_skcipher_encrypt(&data->req),
							&data->wait);

	if (err)
		goto out;

	crypto_ahash_clear_flags(ghash, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(ghash, crypto_aead_get_flags(aead) &
			       CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));
out:
	/* kzfree: the buffer held key-derived material. */
	kzfree(data);
	return err;
}
146 
/* Validate the requested tag length against what GCM permits. */
static int crypto_gcm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	int err = crypto_gcm_check_authsize(authsize);

	return err;
}
152 
/*
 * Prepare per-request state: build the full 16-byte counter block
 * (12-byte IV || big-endian counter = 1) and scatterlists that prepend
 * the auth_tag buffer to the payload, so the CTR pass produces E_K(Y0)
 * in auth_tag alongside the en-/decrypted data.
 */
static void crypto_gcm_init_common(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	__be32 counter = cpu_to_be32(1);
	struct scatterlist *sg;

	memset(pctx->auth_tag, 0, sizeof(pctx->auth_tag));
	memcpy(pctx->iv, req->iv, GCM_AES_IV_SIZE);
	memcpy(pctx->iv + GCM_AES_IV_SIZE, &counter, 4);

	/* src[0] = auth_tag block; chain in the payload past the AAD. */
	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, pctx->auth_tag, sizeof(pctx->auth_tag));
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	/* Out-of-place operation needs the same treatment for dst. */
	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, pctx->auth_tag, sizeof(pctx->auth_tag));
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}
}
177 
/*
 * Set up the CTR subrequest covering the zeroed auth_tag block plus
 * @cryptlen payload bytes; the extra 16 bytes yield E_K(Y0) in auth_tag.
 */
static void crypto_gcm_init_crypt(struct aead_request *req,
				  unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct scatterlist *dst;

	dst = req->src == req->dst ? pctx->src : pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_crypt(skreq, pctx->src, dst,
				     cryptlen + sizeof(pctx->auth_tag),
				     pctx->iv);
}
194 
/*
 * Number of zero-padding bytes needed to bring @len up to a full
 * 16-byte GHASH block; 0 when @len is already block-aligned.
 */
static inline unsigned int gcm_remain(unsigned int len)
{
	unsigned int partial = len % 16;

	if (partial == 0)
		return 0;
	return 16 - partial;
}
200 
201 static void gcm_hash_len_done(struct crypto_async_request *areq, int err);
202 
/*
 * Feed @len bytes from @src into the running GHASH; @compl fires if the
 * child completes asynchronously, with @req as callback data.
 */
static int gcm_hash_update(struct aead_request *req,
			   crypto_completion_t compl,
			   struct scatterlist *src,
			   unsigned int len, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;

	ahash_request_set_callback(ahreq, flags, compl, req);
	ahash_request_set_crypt(ahreq, src, NULL, len);

	return crypto_ahash_update(ahreq);
}
216 
217 static int gcm_hash_remain(struct aead_request *req,
218 			   unsigned int remain,
219 			   crypto_completion_t compl, u32 flags)
220 {
221 	return gcm_hash_update(req, compl, &gcm_zeroes->sg, remain, flags);
222 }
223 
/*
 * Final GHASH block: the bit lengths of AAD and ciphertext, as required
 * by the GCM specification. finup writes the digest into iauth_tag.
 */
static int gcm_hash_len(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	be128 lengths;

	/* Lengths are expressed in bits, big-endian. */
	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(gctx->cryptlen * 8);
	memcpy(pctx->iauth_tag, &lengths, 16);
	sg_init_one(&pctx->sg, pctx->iauth_tag, 16);
	ahash_request_set_callback(ahreq, flags, gcm_hash_len_done, req);
	ahash_request_set_crypt(ahreq, &pctx->sg,
				pctx->iauth_tag, sizeof(lengths));

	return crypto_ahash_finup(ahreq);
}
241 
/* After the length block is hashed, run the mode-specific final step. */
static int gcm_hash_len_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	return gctx->complete(req, flags);
}
249 
/* Async completion for gcm_hash_len: resume the chain or finish @req. */
static void gcm_hash_len_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_len_continue(req, 0);
	if (err == -EINPROGRESS)
		return;	/* a later callback will complete the request */

out:
	aead_request_complete(req, err);
}
264 
/*
 * Ciphertext (and its padding) hashed; hash the length block next.
 * A zero return from gcm_hash_len means it completed synchronously,
 * so chain straight into the continuation.
 */
static int gcm_hash_crypt_remain_continue(struct aead_request *req, u32 flags)
{
	return gcm_hash_len(req, flags) ?:
	       gcm_hash_len_continue(req, flags);
}
270 
/* Async completion for the ciphertext zero-padding update. */
static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;	/* a later callback will complete the request */

out:
	aead_request_complete(req, err);
}
286 
/*
 * Ciphertext hashed; pad it to a block boundary with zeroes if needed,
 * then continue with the length block.
 */
static int gcm_hash_crypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int remain;

	remain = gcm_remain(gctx->cryptlen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_crypt_remain_done, flags) ?:
		       gcm_hash_crypt_remain_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}
301 
/* Async completion for the ciphertext hash update. */
static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_crypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;	/* a later callback will complete the request */

out:
	aead_request_complete(req, err);
}
316 
/*
 * AAD (including padding) hashed; hash the ciphertext next, or skip
 * straight to the length block if there is none.
 */
static int gcm_hash_assoc_remain_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	if (gctx->cryptlen)
		return gcm_hash_update(req, gcm_hash_crypt_done,
				       gctx->src, gctx->cryptlen, flags) ?:
		       gcm_hash_crypt_continue(req, flags);

	return gcm_hash_crypt_remain_continue(req, flags);
}
329 
/* Async completion for the AAD zero-padding update. */
static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq,
				       int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_remain_continue(req, 0);
	if (err == -EINPROGRESS)
		return;	/* a later callback will complete the request */

out:
	aead_request_complete(req, err);
}
345 
/*
 * AAD hashed; pad it to a block boundary with zeroes if needed, then
 * move on to the ciphertext.
 */
static int gcm_hash_assoc_continue(struct aead_request *req, u32 flags)
{
	unsigned int remain;

	remain = gcm_remain(req->assoclen);
	if (remain)
		return gcm_hash_remain(req, remain,
				       gcm_hash_assoc_remain_done, flags) ?:
		       gcm_hash_assoc_remain_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}
358 
/* Async completion for the AAD hash update. */
static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_assoc_continue(req, 0);
	if (err == -EINPROGRESS)
		return;	/* a later callback will complete the request */

out:
	aead_request_complete(req, err);
}
373 
/* GHASH state initialized; hash the AAD first, or skip it if empty. */
static int gcm_hash_init_continue(struct aead_request *req, u32 flags)
{
	if (req->assoclen)
		return gcm_hash_update(req, gcm_hash_assoc_done,
				       req->src, req->assoclen, flags) ?:
		       gcm_hash_assoc_continue(req, flags);

	return gcm_hash_assoc_remain_continue(req, flags);
}
383 
/* Async completion for crypto_ahash_init in gcm_hash. */
static void gcm_hash_init_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_hash_init_continue(req, 0);
	if (err == -EINPROGRESS)
		return;	/* a later callback will complete the request */

out:
	aead_request_complete(req, err);
}
398 
/*
 * Kick off the GHASH state machine: init -> AAD -> AAD pad ->
 * ciphertext -> ciphertext pad -> length block -> gctx->complete.
 * Each step runs its continuation inline on synchronous success (the
 * "?:" chaining) or from its *_done callback on async completion.
 */
static int gcm_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct ahash_request *ahreq = &pctx->u.ahreq;
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));

	ahash_request_set_tfm(ahreq, ctx->ghash);

	ahash_request_set_callback(ahreq, flags, gcm_hash_init_done, req);
	return crypto_ahash_init(ahreq) ?:
	       gcm_hash_init_continue(req, flags);
}
411 
/*
 * Encrypt finale: tag = E_K(Y0) (in auth_tag from the CTR pass) XOR
 * GHASH digest (iauth_tag); copy the truncated tag after the ciphertext.
 */
static int gcm_enc_copy_hash(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;

	crypto_xor(auth_tag, pctx->iauth_tag, 16);
	scatterwalk_map_and_copy(auth_tag, req->dst,
				 req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);
	return 0;
}
424 
/* CTR pass finished: hash the just-produced ciphertext, then emit the tag. */
static int gcm_encrypt_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	/* Skip the auth_tag block at the head of the chained scatterlist. */
	gctx->src = sg_next(req->src == req->dst ? pctx->src : pctx->dst);
	gctx->cryptlen = req->cryptlen;
	gctx->complete = gcm_enc_copy_hash;

	return gcm_hash(req, flags);
}
436 
/* Async completion for the encrypt-side CTR pass. */
static void gcm_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (err)
		goto out;

	err = gcm_encrypt_continue(req, 0);
	if (err == -EINPROGRESS)
		return;	/* a later callback will complete the request */

out:
	aead_request_complete(req, err);
}
451 
/* GCM encryption entry point: CTR-encrypt first, then GHASH + tag. */
static int crypto_gcm_encrypt(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	u32 flags = aead_request_flags(req);

	crypto_gcm_init_common(req);
	crypto_gcm_init_crypt(req, req->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_encrypt_done, req);

	return crypto_skcipher_encrypt(skreq) ?:
	       gcm_encrypt_continue(req, flags);
}
465 
/*
 * Decrypt finale: recompute the expected tag (E_K(Y0) XOR GHASH) and
 * compare it, in constant time, against the tag trailing the ciphertext.
 */
static int crypto_gcm_verify(struct aead_request *req)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	u8 *auth_tag = pctx->auth_tag;
	u8 *iauth_tag = pctx->iauth_tag;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;

	crypto_xor(auth_tag, iauth_tag, 16);
	/* Read the transmitted tag into iauth_tag for the comparison. */
	scatterwalk_map_and_copy(iauth_tag, req->src,
				 req->assoclen + cryptlen, authsize, 0);
	return crypto_memneq(iauth_tag, auth_tag, authsize) ? -EBADMSG : 0;
}
480 
/* Async completion for the decrypt-side CTR pass: verify, then finish. */
static void gcm_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;

	if (!err)
		err = crypto_gcm_verify(req);

	aead_request_complete(req, err);
}
490 
/*
 * GHASH of the ciphertext done (decrypt path hashes before decrypting);
 * now CTR-decrypt the payload and verify the tag.
 */
static int gcm_dec_hash_continue(struct aead_request *req, u32 flags)
{
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct skcipher_request *skreq = &pctx->u.skreq;
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;

	crypto_gcm_init_crypt(req, gctx->cryptlen);
	skcipher_request_set_callback(skreq, flags, gcm_decrypt_done, req);
	return crypto_skcipher_decrypt(skreq) ?: crypto_gcm_verify(req);
}
501 
/* GCM decryption entry point: GHASH the ciphertext first, then decrypt. */
static int crypto_gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_gcm_req_priv_ctx *pctx = crypto_gcm_reqctx(req);
	struct crypto_gcm_ghash_ctx *gctx = &pctx->ghash_ctx;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u32 flags = aead_request_flags(req);

	/* req->cryptlen includes the trailing tag on decryption. */
	cryptlen -= authsize;

	crypto_gcm_init_common(req);

	gctx->src = sg_next(pctx->src);
	gctx->cryptlen = cryptlen;
	gctx->complete = gcm_dec_hash_continue;

	return gcm_hash(req, flags);
}
521 
/*
 * Instantiate the CTR and GHASH children and size the per-request
 * context: aligned private ctx plus the larger of the two child
 * request footprints (they share a union).
 */
static int crypto_gcm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct gcm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_skcipher *ctr;
	struct crypto_ahash *ghash;
	unsigned long align;
	int err;

	ghash = crypto_spawn_ahash(&ictx->ghash);
	if (IS_ERR(ghash))
		return PTR_ERR(ghash);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_hash;

	ctx->ctr = ctr;
	ctx->ghash = ghash;

	/* Extra alignment slack beyond what the ctx area already provides. */
	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(tfm,
		align + offsetof(struct crypto_gcm_req_priv_ctx, u) +
		max(sizeof(struct skcipher_request) +
		    crypto_skcipher_reqsize(ctr),
		    sizeof(struct ahash_request) +
		    crypto_ahash_reqsize(ghash)));

	return 0;

err_free_hash:
	crypto_free_ahash(ghash);
	return err;
}
559 
560 static void crypto_gcm_exit_tfm(struct crypto_aead *tfm)
561 {
562 	struct crypto_gcm_ctx *ctx = crypto_aead_ctx(tfm);
563 
564 	crypto_free_ahash(ctx->ghash);
565 	crypto_free_skcipher(ctx->ctr);
566 }
567 
/* Instance destructor: drop both child spawns, then free the instance. */
static void crypto_gcm_free(struct aead_instance *inst)
{
	struct gcm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_skcipher(&ctx->ctr);
	crypto_drop_ahash(&ctx->ghash);
	kfree(inst);
}
576 
/*
 * Build a gcm(...) instance from a CTR skcipher name and a GHASH name.
 * Validates that the hash really is 16-byte "ghash" and the cipher is a
 * 16-byte-IV "ctr(...)" stream cipher, then fills in the aead_alg ops.
 * On any failure the goto chain unwinds exactly what was acquired.
 */
static int crypto_gcm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *ctr_name,
				    const char *ghash_name)
{
	struct crypto_attr_type *algt;
	u32 mask;
	struct aead_instance *inst;
	struct skcipher_alg *ctr;
	struct crypto_alg *ghash_alg;
	struct hash_alg_common *ghash;
	struct gcm_instance_ctx *ctx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mask = crypto_requires_sync(algt->type, algt->mask);

	/* Takes a module/alg reference; released via crypto_mod_put below. */
	ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type,
				    CRYPTO_ALG_TYPE_HASH,
				    CRYPTO_ALG_TYPE_AHASH_MASK | mask);
	if (IS_ERR(ghash_alg))
		return PTR_ERR(ghash_alg);

	ghash = __crypto_hash_alg_common(ghash_alg);

	err = -ENOMEM;
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		goto out_put_ghash;

	ctx = aead_instance_ctx(inst);
	err = crypto_init_ahash_spawn(&ctx->ghash, ghash,
				      aead_crypto_instance(inst));
	if (err)
		goto err_free_inst;

	/* The hash must be GHASH proper with a full 16-byte digest. */
	err = -EINVAL;
	if (strcmp(ghash->base.cra_name, "ghash") != 0 ||
	    ghash->digestsize != 16)
		goto err_drop_ghash;

	err = crypto_grab_skcipher(&ctx->ctr, aead_crypto_instance(inst),
				   ctr_name, 0, mask);
	if (err)
		goto err_drop_ghash;

	ctr = crypto_spawn_skcipher_alg(&ctx->ctr);

	/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
	err = -EINVAL;
	if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
	    crypto_skcipher_alg_ivsize(ctr) != 16 ||
	    ctr->base.cra_blocksize != 1)
		goto out_put_ctr;

	err = -ENAMETOOLONG;
	/* "gcm(" + inner cipher name reuses ctr's trailing ")". */
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "gcm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "gcm_base(%s,%s)", ctr->base.cra_driver_name,
		     ghash_alg->cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_put_ctr;

	inst->alg.base.cra_flags = (ghash->base.cra_flags |
				    ctr->base.cra_flags) & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (ghash->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = ghash->base.cra_alignmask |
				       ctr->base.cra_alignmask;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_gcm_ctx);
	inst->alg.ivsize = GCM_AES_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(ctr);
	inst->alg.maxauthsize = 16;
	inst->alg.init = crypto_gcm_init_tfm;
	inst->alg.exit = crypto_gcm_exit_tfm;
	inst->alg.setkey = crypto_gcm_setkey;
	inst->alg.setauthsize = crypto_gcm_setauthsize;
	inst->alg.encrypt = crypto_gcm_encrypt;
	inst->alg.decrypt = crypto_gcm_decrypt;

	inst->free = crypto_gcm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_put_ctr;

out_put_ghash:
	/* Drop the lookup reference; the spawn holds its own. */
	crypto_mod_put(ghash_alg);
	return err;

out_put_ctr:
	crypto_drop_skcipher(&ctx->ctr);
err_drop_ghash:
	crypto_drop_ahash(&ctx->ghash);
err_free_inst:
	kfree(inst);
	goto out_put_ghash;
}
685 
/* "gcm(cipher)" template: wrap the cipher name in "ctr(...)" and build. */
static int crypto_gcm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)", cipher_name) >=
	    CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_gcm_create_common(tmpl, tb, ctr_name, "ghash");
}
701 
/* "gcm_base(ctr,ghash)" template: both child names given explicitly. */
static int crypto_gcm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *ghash_name;

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	ghash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(ghash_name))
		return PTR_ERR(ghash_name);

	return crypto_gcm_create_common(tmpl, tb, ctr_name, ghash_name);
}
718 
/*
 * RFC 4106 key layout: GCM key followed by a 4-byte salt. Strip the
 * salt into ctx->nonce and pass the rest to the inner GCM transform.
 */
static int crypto_rfc4106_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, keylen);
}
736 
/* Validate the tag length per RFC 4106, then forward it to the child. */
static int crypto_rfc4106_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(parent);
	int err;

	err = crypto_rfc4106_check_authsize(authsize);
	if (err)
		return err;

	return crypto_aead_setauthsize(ctx->child, authsize);
}
749 
/*
 * Translate an RFC 4106 request into a request on the inner GCM:
 * build the 12-byte IV as salt(4) || per-request IV(8), and rebuild the
 * scatterlists so the trailing 8 IV bytes of the AAD are excluded. The
 * leading assoclen-8 AAD bytes are copied into the aligned iv buffer
 * (just past the IV) and linked in from there.
 */
static struct aead_request *crypto_rfc4106_crypt(struct aead_request *req)
{
	struct crypto_rfc4106_req_ctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(aead);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	/* iv sits after the child's request context, suitably aligned. */
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* Stash the real AAD (without the trailing IV) after the 12-byte IV. */
	scatterwalk_map_and_copy(iv + GCM_AES_IV_SIZE, req->src, 0, req->assoclen - 8, 0);

	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}
790 
/* RFC 4106 encrypt: validate IPsec assoclen, rewrap, run the child. */
static int crypto_rfc4106_encrypt(struct aead_request *req)
{
	int err;

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	req = crypto_rfc4106_crypt(req);

	return crypto_aead_encrypt(req);
}
803 
/* RFC 4106 decrypt: validate IPsec assoclen, rewrap, run the child. */
static int crypto_rfc4106_decrypt(struct aead_request *req)
{
	int err;

	err = crypto_ipsec_check_assoclen(req->assoclen);
	if (err)
		return err;

	req = crypto_rfc4106_crypt(req);

	return crypto_aead_decrypt(req);
}
816 
/*
 * Instantiate the inner GCM transform and size the request context:
 * wrapper ctx + aligned child request + room for the IV/AAD staging
 * buffer (24 bytes: 12-byte IV plus up to 12 relocated AAD bytes).
 */
static int crypto_rfc4106_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4106_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 24);

	return 0;
}
841 
842 static void crypto_rfc4106_exit_tfm(struct crypto_aead *tfm)
843 {
844 	struct crypto_rfc4106_ctx *ctx = crypto_aead_ctx(tfm);
845 
846 	crypto_free_aead(ctx->child);
847 }
848 
/* Instance destructor: release the child spawn, then the instance. */
static void crypto_rfc4106_free(struct aead_instance *inst)
{
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);

	crypto_drop_aead(spawn);
	kfree(inst);
}
854 
/*
 * Build an rfc4106(gcm) instance around an existing GCM-style AEAD:
 * the child must use a 12-byte IV and be a stream cipher (blocksize 1).
 * The wrapper exposes the 8-byte RFC 4106 IV and handles the salt.
 */
static int crypto_rfc4106_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	u32 mask;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	mask = crypto_requires_sync(algt->type, algt->mask);

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
			       ccm_name, 0, mask);
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* Underlying IV size must be 12. */
	if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4106(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4106_ctx);

	inst->alg.ivsize = GCM_RFC4106_IV_SIZE;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = crypto_rfc4106_init_tfm;
	inst->alg.exit = crypto_rfc4106_exit_tfm;

	inst->alg.setkey = crypto_rfc4106_setkey;
	inst->alg.setauthsize = crypto_rfc4106_setauthsize;
	inst->alg.encrypt = crypto_rfc4106_encrypt;
	inst->alg.decrypt = crypto_rfc4106_decrypt;

	inst->free = crypto_rfc4106_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}
944 
/*
 * RFC 4543 key layout mirrors RFC 4106: GCM key followed by a 4-byte
 * salt. Strip the salt into ctx->nonce and key the inner GCM.
 */
static int crypto_rfc4543_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	if (keylen < 4)
		return -EINVAL;

	keylen -= 4;
	memcpy(ctx->nonce, key + keylen, 4);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, keylen);
}
962 
963 static int crypto_rfc4543_setauthsize(struct crypto_aead *parent,
964 				      unsigned int authsize)
965 {
966 	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(parent);
967 
968 	if (authsize != 16)
969 		return -EINVAL;
970 
971 	return crypto_aead_setauthsize(ctx->child, authsize);
972 }
973 
/*
 * RFC 4543 (GMAC): everything is authenticated, nothing is encrypted.
 * The child GCM request therefore has cryptlen 0 (encrypt) or authsize
 * (decrypt, covering just the tag), with all payload bytes passed as
 * AAD. Out-of-place requests first copy src to dst via the null cipher
 * since the "ciphertext" equals the plaintext.
 */
static int crypto_rfc4543_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_rfc4543_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	unsigned int authsize = crypto_aead_authsize(aead);
	/* iv sits after the child's request context, suitably aligned. */
	u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),
			   crypto_aead_alignmask(ctx->child) + 1);
	int err;

	if (req->src != req->dst) {
		err = crypto_rfc4543_copy_src_to_dst(req, enc);
		if (err)
			return err;
	}

	/* 12-byte IV = 4-byte salt || 8-byte per-request IV. */
	memcpy(iv, ctx->nonce, 4);
	memcpy(iv + 4, req->iv, 8);

	aead_request_set_tfm(subreq, ctx->child);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       enc ? 0 : authsize, iv);
	aead_request_set_ad(subreq, req->assoclen + req->cryptlen -
				    subreq->cryptlen);

	return enc ? crypto_aead_encrypt(subreq) : crypto_aead_decrypt(subreq);
}
1004 
/*
 * Copy the whole input (AAD + payload, minus the tag when decrypting)
 * from src to dst using the null skcipher, for out-of-place requests.
 */
static int crypto_rfc4543_copy_src_to_dst(struct aead_request *req, bool enc)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int nbytes = req->assoclen + req->cryptlen -
			      (enc ? 0 : authsize);
	SYNC_SKCIPHER_REQUEST_ON_STACK(nreq, ctx->null);

	skcipher_request_set_sync_tfm(nreq, ctx->null);
	skcipher_request_set_callback(nreq, req->base.flags, NULL, NULL);
	skcipher_request_set_crypt(nreq, req->src, req->dst, nbytes, NULL);

	/* The null cipher's "encrypt" is a plain copy. */
	return crypto_skcipher_encrypt(nreq);
}
1020 
1021 static int crypto_rfc4543_encrypt(struct aead_request *req)
1022 {
1023 	return crypto_ipsec_check_assoclen(req->assoclen) ?:
1024 	       crypto_rfc4543_crypt(req, true);
1025 }
1026 
1027 static int crypto_rfc4543_decrypt(struct aead_request *req)
1028 {
1029 	return crypto_ipsec_check_assoclen(req->assoclen) ?:
1030 	       crypto_rfc4543_crypt(req, false);
1031 }
1032 
/*
 * Instantiate the inner GCM transform and the shared null skcipher
 * (used for out-of-place src->dst copies), then size the request
 * context: wrapper ctx + aligned child request + 12-byte IV buffer.
 */
static int crypto_rfc4543_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_rfc4543_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_aead_spawn *spawn = &ictx->aead;
	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	struct crypto_sync_skcipher *null;
	unsigned long align;
	int err = 0;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	null = crypto_get_default_null_skcipher();
	err = PTR_ERR(null);
	if (IS_ERR(null))
		goto err_free_aead;

	ctx->child = aead;
	ctx->null = null;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4543_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + GCM_AES_IV_SIZE);

	return 0;

err_free_aead:
	crypto_free_aead(aead);
	return err;
}
1070 
1071 static void crypto_rfc4543_exit_tfm(struct crypto_aead *tfm)
1072 {
1073 	struct crypto_rfc4543_ctx *ctx = crypto_aead_ctx(tfm);
1074 
1075 	crypto_free_aead(ctx->child);
1076 	crypto_put_default_null_skcipher();
1077 }
1078 
1079 static void crypto_rfc4543_free(struct aead_instance *inst)
1080 {
1081 	struct crypto_rfc4543_instance_ctx *ctx = aead_instance_ctx(inst);
1082 
1083 	crypto_drop_aead(&ctx->aead);
1084 
1085 	kfree(inst);
1086 }
1087 
1088 static int crypto_rfc4543_create(struct crypto_template *tmpl,
1089 				struct rtattr **tb)
1090 {
1091 	struct crypto_attr_type *algt;
1092 	u32 mask;
1093 	struct aead_instance *inst;
1094 	struct crypto_aead_spawn *spawn;
1095 	struct aead_alg *alg;
1096 	struct crypto_rfc4543_instance_ctx *ctx;
1097 	const char *ccm_name;
1098 	int err;
1099 
1100 	algt = crypto_get_attr_type(tb);
1101 	if (IS_ERR(algt))
1102 		return PTR_ERR(algt);
1103 
1104 	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
1105 		return -EINVAL;
1106 
1107 	mask = crypto_requires_sync(algt->type, algt->mask);
1108 
1109 	ccm_name = crypto_attr_alg_name(tb[1]);
1110 	if (IS_ERR(ccm_name))
1111 		return PTR_ERR(ccm_name);
1112 
1113 	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
1114 	if (!inst)
1115 		return -ENOMEM;
1116 
1117 	ctx = aead_instance_ctx(inst);
1118 	spawn = &ctx->aead;
1119 	err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
1120 			       ccm_name, 0, mask);
1121 	if (err)
1122 		goto out_free_inst;
1123 
1124 	alg = crypto_spawn_aead_alg(spawn);
1125 
1126 	err = -EINVAL;
1127 
1128 	/* Underlying IV size must be 12. */
1129 	if (crypto_aead_alg_ivsize(alg) != GCM_AES_IV_SIZE)
1130 		goto out_drop_alg;
1131 
1132 	/* Not a stream cipher? */
1133 	if (alg->base.cra_blocksize != 1)
1134 		goto out_drop_alg;
1135 
1136 	err = -ENAMETOOLONG;
1137 	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
1138 		     "rfc4543(%s)", alg->base.cra_name) >=
1139 	    CRYPTO_MAX_ALG_NAME ||
1140 	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
1141 		     "rfc4543(%s)", alg->base.cra_driver_name) >=
1142 	    CRYPTO_MAX_ALG_NAME)
1143 		goto out_drop_alg;
1144 
1145 	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
1146 	inst->alg.base.cra_priority = alg->base.cra_priority;
1147 	inst->alg.base.cra_blocksize = 1;
1148 	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
1149 
1150 	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4543_ctx);
1151 
1152 	inst->alg.ivsize = GCM_RFC4543_IV_SIZE;
1153 	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
1154 	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);
1155 
1156 	inst->alg.init = crypto_rfc4543_init_tfm;
1157 	inst->alg.exit = crypto_rfc4543_exit_tfm;
1158 
1159 	inst->alg.setkey = crypto_rfc4543_setkey;
1160 	inst->alg.setauthsize = crypto_rfc4543_setauthsize;
1161 	inst->alg.encrypt = crypto_rfc4543_encrypt;
1162 	inst->alg.decrypt = crypto_rfc4543_decrypt;
1163 
1164 	inst->free = crypto_rfc4543_free,
1165 
1166 	err = aead_register_instance(tmpl, inst);
1167 	if (err)
1168 		goto out_drop_alg;
1169 
1170 out:
1171 	return err;
1172 
1173 out_drop_alg:
1174 	crypto_drop_aead(spawn);
1175 out_free_inst:
1176 	kfree(inst);
1177 	goto out;
1178 }
1179 
/*
 * Templates provided by this module, registered/unregistered as a batch:
 *   gcm_base - GCM with explicitly named inner ctr and ghash algorithms
 *   gcm      - plain GCM over a named block cipher
 *   rfc4106  - GCM for IPsec ESP (RFC 4106, nonce-prefixed IV)
 *   rfc4543  - GMAC, authentication-only GCM (RFC 4543)
 */
static struct crypto_template crypto_gcm_tmpls[] = {
	{
		.name = "gcm_base",
		.create = crypto_gcm_base_create,
		.module = THIS_MODULE,
	}, {
		.name = "gcm",
		.create = crypto_gcm_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4106",
		.create = crypto_rfc4106_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4543",
		.create = crypto_rfc4543_create,
		.module = THIS_MODULE,
	},
};
1199 
1200 static int __init crypto_gcm_module_init(void)
1201 {
1202 	int err;
1203 
1204 	gcm_zeroes = kzalloc(sizeof(*gcm_zeroes), GFP_KERNEL);
1205 	if (!gcm_zeroes)
1206 		return -ENOMEM;
1207 
1208 	sg_init_one(&gcm_zeroes->sg, gcm_zeroes->buf, sizeof(gcm_zeroes->buf));
1209 
1210 	err = crypto_register_templates(crypto_gcm_tmpls,
1211 					ARRAY_SIZE(crypto_gcm_tmpls));
1212 	if (err)
1213 		kfree(gcm_zeroes);
1214 
1215 	return err;
1216 }
1217 
1218 static void __exit crypto_gcm_module_exit(void)
1219 {
1220 	kfree(gcm_zeroes);
1221 	crypto_unregister_templates(crypto_gcm_tmpls,
1222 				    ARRAY_SIZE(crypto_gcm_tmpls));
1223 }
1224 
/* Registered at subsys initcall level so the templates are available early. */
subsys_initcall(crypto_gcm_module_init);
module_exit(crypto_gcm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Galois/Counter Mode");
MODULE_AUTHOR("Mikko Herranen <mh1@iki.fi>");
/* Allow module autoloading by any of the template names provided above. */
MODULE_ALIAS_CRYPTO("gcm_base");
MODULE_ALIAS_CRYPTO("rfc4106");
MODULE_ALIAS_CRYPTO("rfc4543");
MODULE_ALIAS_CRYPTO("gcm");
1235