1 // SPDX-License-Identifier: GPL-2.0-only
2 /* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
3  *
4  * Copyright (C) 2012 David S. Miller <davem@davemloft.net>
5  */
6 
7 #define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt
8 
9 #include <linux/crypto.h>
10 #include <linux/init.h>
11 #include <linux/module.h>
12 #include <linux/mm.h>
13 #include <linux/types.h>
14 #include <crypto/algapi.h>
15 #include <crypto/internal/skcipher.h>
16 
17 #include <asm/fpumacro.h>
18 #include <asm/pstate.h>
19 #include <asm/elf.h>
20 
21 #include "opcodes.h"
22 
/* Camellia accepts 128-, 192- and 256-bit keys; block size is 128 bits. */
#define CAMELLIA_MIN_KEY_SIZE        16
#define CAMELLIA_MAX_KEY_SIZE        32
#define CAMELLIA_BLOCK_SIZE          16
/* Size in bytes of one expanded key schedule, as laid out by the
 * sparc64 assembly key-expansion routine.
 */
#define CAMELLIA_TABLE_BYTE_LEN     272

struct camellia_sparc64_ctx {
	/* Expanded round keys for the encrypt direction. */
	u64 encrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
	/* Expanded round keys for the decrypt direction. */
	u64 decrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
	/* Original key length in bytes (16, 24 or 32); selects the
	 * 3- vs 4-grand-rounds assembly routines.
	 */
	int key_len;
};
33 
34 extern void camellia_sparc64_key_expand(const u32 *in_key, u64 *encrypt_key,
35 					unsigned int key_len, u64 *decrypt_key);
36 
37 static int camellia_set_key(struct crypto_tfm *tfm, const u8 *_in_key,
38 			    unsigned int key_len)
39 {
40 	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
41 	const u32 *in_key = (const u32 *) _in_key;
42 	u32 *flags = &tfm->crt_flags;
43 
44 	if (key_len != 16 && key_len != 24 && key_len != 32) {
45 		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
46 		return -EINVAL;
47 	}
48 
49 	ctx->key_len = key_len;
50 
51 	camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
52 				    key_len, &ctx->decrypt_key[0]);
53 	return 0;
54 }
55 
56 static int camellia_set_key_skcipher(struct crypto_skcipher *tfm,
57 				     const u8 *in_key, unsigned int key_len)
58 {
59 	return camellia_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
60 }
61 
62 extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
63 				   u32 *output, unsigned int key_len);
64 
65 static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
66 {
67 	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
68 
69 	camellia_sparc64_crypt(&ctx->encrypt_key[0],
70 			       (const u32 *) src,
71 			       (u32 *) dst, ctx->key_len);
72 }
73 
74 static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
75 {
76 	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
77 
78 	camellia_sparc64_crypt(&ctx->decrypt_key[0],
79 			       (const u32 *) src,
80 			       (u32 *) dst, ctx->key_len);
81 }
82 
83 extern void camellia_sparc64_load_keys(const u64 *key, unsigned int key_len);
84 
85 typedef void ecb_crypt_op(const u64 *input, u64 *output, unsigned int len,
86 			  const u64 *key);
87 
88 extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
89 extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;
90 
91 static int __ecb_crypt(struct skcipher_request *req, bool encrypt)
92 {
93 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
94 	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
95 	struct skcipher_walk walk;
96 	ecb_crypt_op *op;
97 	const u64 *key;
98 	unsigned int nbytes;
99 	int err;
100 
101 	op = camellia_sparc64_ecb_crypt_3_grand_rounds;
102 	if (ctx->key_len != 16)
103 		op = camellia_sparc64_ecb_crypt_4_grand_rounds;
104 
105 	err = skcipher_walk_virt(&walk, req, true);
106 	if (err)
107 		return err;
108 
109 	if (encrypt)
110 		key = &ctx->encrypt_key[0];
111 	else
112 		key = &ctx->decrypt_key[0];
113 	camellia_sparc64_load_keys(key, ctx->key_len);
114 	while ((nbytes = walk.nbytes) != 0) {
115 		op(walk.src.virt.addr, walk.dst.virt.addr,
116 		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key);
117 		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
118 	}
119 	fprs_write(0);
120 	return err;
121 }
122 
/* ECB encrypt: thin wrapper selecting the encrypt key schedule. */
static int ecb_encrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, true);
}
127 
/* ECB decrypt: thin wrapper selecting the decrypt key schedule. */
static int ecb_decrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, false);
}
132 
133 typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
134 			  const u64 *key, u64 *iv);
135 
136 extern cbc_crypt_op camellia_sparc64_cbc_encrypt_3_grand_rounds;
137 extern cbc_crypt_op camellia_sparc64_cbc_encrypt_4_grand_rounds;
138 extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
139 extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;
140 
141 static int cbc_encrypt(struct skcipher_request *req)
142 {
143 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
144 	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
145 	struct skcipher_walk walk;
146 	cbc_crypt_op *op;
147 	const u64 *key;
148 	unsigned int nbytes;
149 	int err;
150 
151 	op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
152 	if (ctx->key_len != 16)
153 		op = camellia_sparc64_cbc_encrypt_4_grand_rounds;
154 
155 	err = skcipher_walk_virt(&walk, req, true);
156 	if (err)
157 		return err;
158 
159 	key = &ctx->encrypt_key[0];
160 	camellia_sparc64_load_keys(key, ctx->key_len);
161 	while ((nbytes = walk.nbytes) != 0) {
162 		op(walk.src.virt.addr, walk.dst.virt.addr,
163 		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
164 		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
165 	}
166 	fprs_write(0);
167 	return err;
168 }
169 
170 static int cbc_decrypt(struct skcipher_request *req)
171 {
172 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
173 	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
174 	struct skcipher_walk walk;
175 	cbc_crypt_op *op;
176 	const u64 *key;
177 	unsigned int nbytes;
178 	int err;
179 
180 	op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
181 	if (ctx->key_len != 16)
182 		op = camellia_sparc64_cbc_decrypt_4_grand_rounds;
183 
184 	err = skcipher_walk_virt(&walk, req, true);
185 	if (err)
186 		return err;
187 
188 	key = &ctx->decrypt_key[0];
189 	camellia_sparc64_load_keys(key, ctx->key_len);
190 	while ((nbytes = walk.nbytes) != 0) {
191 		op(walk.src.virt.addr, walk.dst.virt.addr,
192 		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
193 		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
194 	}
195 	fprs_write(0);
196 	return err;
197 }
198 
/* Raw single-block cipher registration ("camellia").  The 4-byte
 * alignmask matches the u32 accesses done by the assembly routine.
 */
static struct crypto_alg cipher_alg = {
	.cra_name		= "camellia",
	.cra_driver_name	= "camellia-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_sparc64_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.cia_max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.cia_setkey		= camellia_set_key,
			.cia_encrypt		= camellia_encrypt,
			.cia_decrypt		= camellia_decrypt
		}
	}
};
218 
/* Multi-block skcipher registrations (ECB and CBC).  The 8-byte
 * alignmask matches the u64 accesses done by the bulk assembly
 * routines; only CBC carries an IV.
 */
static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(camellia)",
		.base.cra_driver_name	= "ecb-camellia-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_sparc64_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.setkey			= camellia_set_key_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(camellia)",
		.base.cra_driver_name	= "cbc-camellia-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_sparc64_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.ivsize			= CAMELLIA_BLOCK_SIZE,
		.setkey			= camellia_set_key_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}
};
249 
250 static bool __init sparc64_has_camellia_opcode(void)
251 {
252 	unsigned long cfr;
253 
254 	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
255 		return false;
256 
257 	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
258 	if (!(cfr & CFR_CAMELLIA))
259 		return false;
260 
261 	return true;
262 }
263 
264 static int __init camellia_sparc64_mod_init(void)
265 {
266 	int err;
267 
268 	if (!sparc64_has_camellia_opcode()) {
269 		pr_info("sparc64 camellia opcodes not available.\n");
270 		return -ENODEV;
271 	}
272 	pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
273 	err = crypto_register_alg(&cipher_alg);
274 	if (err)
275 		return err;
276 	err = crypto_register_skciphers(skcipher_algs,
277 					ARRAY_SIZE(skcipher_algs));
278 	if (err)
279 		crypto_unregister_alg(&cipher_alg);
280 	return err;
281 }
282 
283 static void __exit camellia_sparc64_mod_fini(void)
284 {
285 	crypto_unregister_alg(&cipher_alg);
286 	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
287 }
288 
289 module_init(camellia_sparc64_mod_init);
290 module_exit(camellia_sparc64_mod_fini);
291 
292 MODULE_LICENSE("GPL");
293 MODULE_DESCRIPTION("Camellia Cipher Algorithm, sparc64 camellia opcode accelerated");
294 
295 MODULE_ALIAS_CRYPTO("camellia");
296 
297 #include "crop_devid.c"
298