/*
 * Cryptographic API.
 *
 * SHA-224 and SHA-256 Secure Hash Algorithm.
 *
 * Adapted for OCTEON by Aaro Koskinen <aaro.koskinen@iki.fi>.
 *
 * Based on crypto/sha256_generic.c, which is:
 *
 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <linux/mm.h>
#include <crypto/sha.h>
#include <linux/init.h>
#include <linux/types.h>
#include <linux/module.h>
#include <asm/byteorder.h>
#include <asm/octeon/octeon.h>
#include <crypto/internal/hash.h>

#include "octeon-crypto.h"

/*
 * We pass everything as 64-bit. OCTEON can handle misaligned data.
 */

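/*
 * The SHA-256 state is eight 32-bit words (32 bytes), which the COP2
 * hash unit sees as four 64-bit dwords. The two helpers below copy the
 * software state into the hardware registers and back out again.
 */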
static void octeon_sha256_store_hash(struct sha256_state *sctx)
{
	u64 *hash = (u64 *)sctx->state;

	write_octeon_64bit_hash_dword(hash[0], 0);
	write_octeon_64bit_hash_dword(hash[1], 1);
	write_octeon_64bit_hash_dword(hash[2], 2);
	write_octeon_64bit_hash_dword(hash[3], 3);
}

static void octeon_sha256_read_hash(struct sha256_state *sctx)
{
	u64 *hash = (u64 *)sctx->state;

	hash[0] = read_octeon_64bit_hash_dword(0);
	hash[1] = read_octeon_64bit_hash_dword(1);
	hash[2] = read_octeon_64bit_hash_dword(2);
	hash[3] = read_octeon_64bit_hash_dword(3);
}

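/*
 * Feed one 64-byte block to the hash unit: the first seven 64-bit words
 * are written to the block registers, then octeon_sha256_start() writes
 * the last word and kicks off the compression round.
 */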
static void octeon_sha256_transform(const void *_block)
{
	const u64 *block = _block;

	write_octeon_64bit_block_dword(block[0], 0);
	write_octeon_64bit_block_dword(block[1], 1);
	write_octeon_64bit_block_dword(block[2], 2);
	write_octeon_64bit_block_dword(block[3], 3);
	write_octeon_64bit_block_dword(block[4], 4);
	write_octeon_64bit_block_dword(block[5], 5);
	write_octeon_64bit_block_dword(block[6], 6);
	octeon_sha256_start(block[7]);
}

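/*
 * SHA-224 shares the SHA-256 compression function; only the initial
 * hash values differ and the final digest is truncated to 28 bytes.
 */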
static int octeon_sha224_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	sctx->state[0] = SHA224_H0;
	sctx->state[1] = SHA224_H1;
	sctx->state[2] = SHA224_H2;
	sctx->state[3] = SHA224_H3;
	sctx->state[4] = SHA224_H4;
	sctx->state[5] = SHA224_H5;
	sctx->state[6] = SHA224_H6;
	sctx->state[7] = SHA224_H7;
	sctx->count = 0;

	return 0;
}

static int octeon_sha256_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	sctx->state[0] = SHA256_H0;
	sctx->state[1] = SHA256_H1;
	sctx->state[2] = SHA256_H2;
	sctx->state[3] = SHA256_H3;
	sctx->state[4] = SHA256_H4;
	sctx->state[5] = SHA256_H5;
	sctx->state[6] = SHA256_H6;
	sctx->state[7] = SHA256_H7;
	sctx->count = 0;

	return 0;
}

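/*
 * Core update: complete any partial block buffered in sctx->buf, run the
 * hash unit over every full 64-byte block, and stash the trailing bytes
 * for the next call.
 */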
static void __octeon_sha256_update(struct sha256_state *sctx, const u8 *data,
				   unsigned int len)
{
	unsigned int partial;
	unsigned int done;
	const u8 *src;

	partial = sctx->count % SHA256_BLOCK_SIZE;
	sctx->count += len;
	done = 0;
	src = data;

	if ((partial + len) >= SHA256_BLOCK_SIZE) {
		if (partial) {
			/*
			 * Complete the buffered block first. Starting
			 * "done" at -partial makes the loop below consume
			 * exactly the bytes of "data" that topped up the
			 * buffer before moving on to full blocks.
			 */
			done = -partial;
			memcpy(sctx->buf + partial, data,
			       done + SHA256_BLOCK_SIZE);
			src = sctx->buf;
		}

		do {
			octeon_sha256_transform(src);
			done += SHA256_BLOCK_SIZE;
			src = data + done;
		} while (done + SHA256_BLOCK_SIZE <= len);

		partial = 0;
	}
	memcpy(sctx->buf + partial, src, len - done);
}

static int octeon_sha256_update(struct shash_desc *desc, const u8 *data,
				unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	struct octeon_cop2_state state;
	unsigned long flags;

	/*
	 * Small updates never reach the crypto engine, so the generic sha256 is
	 * faster because of the heavyweight octeon_crypto_enable() /
	 * octeon_crypto_disable().
	 */
	if ((sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)
		return crypto_sha256_update(desc, data, len);

	flags = octeon_crypto_enable(&state);
	octeon_sha256_store_hash(sctx);

	__octeon_sha256_update(sctx, data, len);

	octeon_sha256_read_hash(sctx);
	octeon_crypto_disable(&state, flags);

	return 0;
}

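/*
 * Finalize: append a 0x80 byte, zero-pad to 56 mod 64, then append the
 * message length in bits as a 64-bit big-endian value, so the padded
 * message ends exactly on a 64-byte block boundary.
 */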
static int octeon_sha256_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	static const u8 padding[64] = { 0x80, };
	struct octeon_cop2_state state;
	__be32 *dst = (__be32 *)out;
	unsigned int pad_len;
	unsigned long flags;
	unsigned int index;
	__be64 bits;
	int i;

	/* Save number of bits. */
	bits = cpu_to_be64(sctx->count << 3);

	/* Pad out to 56 mod 64. */
	index = sctx->count & 0x3f;
	pad_len = (index < 56) ? (56 - index) : ((64+56) - index);

	flags = octeon_crypto_enable(&state);
	octeon_sha256_store_hash(sctx);

	__octeon_sha256_update(sctx, padding, pad_len);

	/* Append length (before padding). */
	__octeon_sha256_update(sctx, (const u8 *)&bits, sizeof(bits));

	octeon_sha256_read_hash(sctx);
	octeon_crypto_disable(&state, flags);

	/* Store state in digest */
	for (i = 0; i < 8; i++)
		dst[i] = cpu_to_be32(sctx->state[i]);

	/* Zeroize sensitive information. */
	memset(sctx, 0, sizeof(*sctx));

	return 0;
}

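/*
 * SHA-224 finalization: run the full SHA-256 finalization into a
 * temporary buffer, copy out the first 28 bytes and wipe the rest.
 */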
static int octeon_sha224_final(struct shash_desc *desc, u8 *hash)
{
	u8 D[SHA256_DIGEST_SIZE];

	octeon_sha256_final(desc, D);

	memcpy(hash, D, SHA224_DIGEST_SIZE);
	memzero_explicit(D, SHA256_DIGEST_SIZE);

	return 0;
}

static int octeon_sha256_export(struct shash_desc *desc, void *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	memcpy(out, sctx, sizeof(*sctx));
	return 0;
}

static int octeon_sha256_import(struct shash_desc *desc, const void *in)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	memcpy(sctx, in, sizeof(*sctx));
	return 0;
}

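/*
 * Register both algorithms as shash drivers. OCTEON_CR_OPCODE_PRIORITY
 * ranks them above the generic C implementations, so the crypto API
 * prefers the hardware-assisted versions when they are available.
 */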
static struct shash_alg octeon_sha256_algs[2] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	octeon_sha256_init,
	.update		=	octeon_sha256_update,
	.final		=	octeon_sha256_final,
	.export		=	octeon_sha256_export,
	.import		=	octeon_sha256_import,
	.descsize	=	sizeof(struct sha256_state),
	.statesize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name=	"octeon-sha256",
		.cra_priority	=	OCTEON_CR_OPCODE_PRIORITY,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	octeon_sha224_init,
	.update		=	octeon_sha256_update,
	.final		=	octeon_sha224_final,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name=	"octeon-sha224",
		.cra_priority	=	OCTEON_CR_OPCODE_PRIORITY,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

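/*
 * Usage sketch (illustrative only, not part of this driver): kernel code
 * reaches these transforms through the generic shash API, for example:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	desc->tfm = tfm;
 *	crypto_shash_digest(desc, data, len, digest);
 *	crypto_free_shash(tfm);
 *
 * The crypto core dispatches to "octeon-sha256" when it is the highest
 * priority "sha256" implementation registered on the system.
 */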
static int __init octeon_sha256_mod_init(void)
{
	if (!octeon_has_crypto())
		return -ENOTSUPP;
	return crypto_register_shashes(octeon_sha256_algs,
				       ARRAY_SIZE(octeon_sha256_algs));
}

static void __exit octeon_sha256_mod_fini(void)
{
	crypto_unregister_shashes(octeon_sha256_algs,
				  ARRAY_SIZE(octeon_sha256_algs));
}

module_init(octeon_sha256_mod_init);
module_exit(octeon_sha256_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm (OCTEON)");
MODULE_AUTHOR("Aaro Koskinen <aaro.koskinen@iki.fi>");