xref: /openbmc/linux/arch/arm/crypto/ghash-ce-glue.c (revision 151f4e2b)
/*
 * Accelerated GHASH implementation with ARMv8 vmull.p64 instructions.
 *
 * Copyright (C) 2015 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/cryptd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/gf128mul.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH hash function using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

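/*
 * The first four powers of the hash key H (H, H^2, H^3, H^4), each stored
 * in the shifted/reflected form produced by ghash_reflect() below. The
 * powers are derived in ghash_setkey(); presumably they let the NEON code
 * aggregate several blocks (up to four, given the four powers) per
 * reduction step.
 */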
struct ghash_key {
	u64	h[2];
	u64	h2[2];
	u64	h3[2];
	u64	h4[2];
};

struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

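/*
 * Assembler routines from ghash-ce-core.S: the _p64 variant uses the
 * ARMv8 Crypto Extensions vmull.p64 (64x64 -> 128 bit carry-less
 * multiply), while the _p8 variant synthesizes an equivalent multiply
 * out of plain NEON vmull.p8 instructions for CPUs without PMULL.
 */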
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       struct ghash_key const *k,
				       const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      struct ghash_key const *k,
				      const char *head);

static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src,
				  struct ghash_key const *k,
				  const char *head);

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

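/*
 * Buffer input until at least one full 16-byte block is available; only
 * whole blocks are handed to the NEON code. A previously buffered partial
 * block is passed via the 'head' argument so the assembler consumes it
 * ahead of 'src'.
 */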
static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		kernel_neon_begin();
		pmull_ghash_update(blocks, ctx->digest, src, key,
				   partial ? ctx->buf : NULL);
		kernel_neon_end();
		src += blocks * GHASH_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}

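/*
 * Pad any remaining partial block with zeroes (as the GHASH definition
 * requires), run it through the NEON code, and emit the digest as a
 * 16-byte big-endian value; dg[] holds it as two host-endian 64-bit
 * halves with the most significant half in dg[1].
 */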
static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
		kernel_neon_begin();
		pmull_ghash_update(1, ctx->digest, ctx->buf, key, NULL);
		kernel_neon_end();
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

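/*
 * Convert a be128 value into the representation the assembler expects:
 * the two 64-bit halves are swapped and the whole value is shifted left
 * by one bit, with the 0xc2... constant folding in the reduction
 * polynomial (x^128 + x^7 + x^2 + x + 1, bit-reflected) when the
 * shifted-out bit is set. This appears to bake GHASH's bit-reversal
 * convention into the key so the multiply routines can use straight
 * carry-less multiplies per block.
 */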
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) >> 63;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}

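/*
 * GHASH only accepts a 16-byte key. Derive H^2, H^3 and H^4 from it with
 * the generic GF(2^128) multiply, reflecting each power into the form
 * the assembler consumes.
 */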
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);
	be128 h, k;

	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(&k, inkey, GHASH_BLOCK_SIZE);
	ghash_reflect(key->h, &k);

	h = k;
	gf128mul_lle(&h, &k);
	ghash_reflect(key->h2, &h);

	gf128mul_lle(&h, &k);
	ghash_reflect(key->h3, &h);

	gf128mul_lle(&h, &k);
	ghash_reflect(key->h4, &h);

	return 0;
}

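/*
 * This shash is marked CRYPTO_ALG_INTERNAL (and registered at priority 0),
 * so generic "ghash" lookups never select it directly; it is only
 * reachable through the async wrapper declared further down.
 */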
static struct shash_alg ghash_alg = {
	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update,
	.final			= ghash_final,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),
	.base			= {
		.cra_name	= "__ghash",
		.cra_driver_name = "__driver-ghash-ce",
		.cra_priority	= 0,
		.cra_flags	= CRYPTO_ALG_INTERNAL,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_key),
		.cra_module	= THIS_MODULE,
	},
};

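/*
 * Async "ghash" wrapper around the internal shash. kernel_neon_begin()
 * may only be called where the NEON register state can be touched, so
 * whenever crypto_simd_usable() rules NEON out the request is bounced to
 * cryptd, which reruns it from process context. Requests are also
 * deferred while cryptd still has work queued, presumably so that
 * requests stay in order once some have been deferred.
 */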
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	return crypto_shash_init(desc);
}

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return crypto_shash_final(desc, req->result);
	}
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		return shash_ahash_digest(req, desc);
	}
}

static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);

	return crypto_shash_import(desc, in);
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	return crypto_shash_export(desc, out);
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(child, key, keylen);
	crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
			       & CRYPTO_TFM_RES_MASK);

	return err;
}

static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("__driver-ghash-ce",
					CRYPTO_ALG_INTERNAL,
					CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
	.init			= ghash_async_init,
	.update			= ghash_async_update,
	.final			= ghash_async_final,
	.setkey			= ghash_async_setkey,
	.digest			= ghash_async_digest,
	.import			= ghash_async_import,
	.export			= ghash_async_export,
	.halg.digestsize	= GHASH_DIGEST_SIZE,
	.halg.statesize		= sizeof(struct ghash_desc_ctx),
	.halg.base		= {
		.cra_name	= "ghash",
		.cra_driver_name = "ghash-ce",
		.cra_priority	= 300,
		.cra_flags	= CRYPTO_ALG_ASYNC,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_async_ctx),
		.cra_module	= THIS_MODULE,
		.cra_init	= ghash_async_init_tfm,
		.cra_exit	= ghash_async_exit_tfm,
	},
};

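/*
 * NEON is a hard requirement; HWCAP2_PMULL additionally selects the
 * vmull.p64 implementation, otherwise the slower vmull.p8 fallback is
 * used. The internal shash is registered first, then the public async
 * wrapper on top of it.
 */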
static int __init ghash_ce_mod_init(void)
{
	int err;

	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	if (elf_hwcap2 & HWCAP2_PMULL)
		pmull_ghash_update = pmull_ghash_update_p64;
	else
		pmull_ghash_update = pmull_ghash_update_p8;

	err = crypto_register_shash(&ghash_alg);
	if (err)
		return err;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
	return err;
}

static void __exit ghash_ce_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);
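
/*
 * Editor's sketch, not part of the original driver: a minimal example of
 * how a kernel caller might drive the "ghash" ahash registered above.
 * The function name, key bytes and data bytes are illustrative
 * assumptions; <linux/scatterlist.h> and <linux/err.h> are assumed to be
 * available (both are pulled in indirectly by the headers above).
 */
static int __maybe_unused ghash_ce_usage_sketch(void)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	u8 key[GHASH_BLOCK_SIZE] = { 0x01 };	/* hypothetical hash key H */
	u8 data[GHASH_BLOCK_SIZE] = { 0x02 };	/* a single input block */
	u8 digest[GHASH_DIGEST_SIZE];
	int err;

	tfm = crypto_alloc_ahash("ghash", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ahash_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_free_tfm;

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* Wait synchronously even if cryptd completes the request async. */
	sg_init_one(&sg, data, sizeof(data));
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);
	ahash_request_set_crypt(req, &sg, digest, sizeof(data));
	err = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);
out_free_tfm:
	crypto_free_ahash(tfm);
	return err;
}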