/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/init.h>
#include "crypt_s390.h"

#define AES_MIN_KEY_SIZE	16
#define AES_MAX_KEY_SIZE	32

/* data block size for all key lengths */
#define AES_BLOCK_SIZE		16

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

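/*
 * Bit mask of the AES key lengths supported by the CPACF of this
 * machine, probed once in aes_init() via crypt_s390_func_available().
 */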
static char keylen_flag = 0;

struct s390_aes_ctx {
	u8 iv[AES_BLOCK_SIZE];
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
};

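/*
 * Accept only key lengths for which a hardware cipher function is
 * available; anything else is reported back as a bad key length.
 */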
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			goto fail;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			goto fail;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			goto fail;
		break;
	default:
		goto fail;
	}

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
fail:
	*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

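/*
 * Encrypt or decrypt a single 16-byte block with the cipher message
 * (KM) instruction, selecting the function code that matches the key
 * length set up by aes_set_key().
 */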
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	CRYPT_S390_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	aes_encrypt,
			.cia_decrypt		=	aes_decrypt,
		}
	}
};

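/*
 * Cache the KM function codes for this key length so that the crypt
 * path does not need to re-dispatch on key_len for every request.
 */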
static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

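/*
 * Walk the scatterlists and hand each contiguous chunk to the KM
 * instruction, which processes any multiple of the block size in a
 * single call; only the sub-block remainder is returned to the walker.
 */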
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		=	"ecb(aes)",
	.cra_driver_name	=	"ecb-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(ecb_aes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.setkey			=	ecb_aes_set_key,
			.encrypt		=	ecb_aes_encrypt,
			.decrypt		=	ecb_aes_decrypt,
		}
	}
};

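/*
 * As for ECB, but cache the cipher message with chaining (KMC)
 * function codes used for CBC mode.
 */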
static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

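/*
 * CBC: the IV is copied into the KMC parameter block, where the
 * instruction keeps the chaining value up to date across calls, and
 * is copied back to the walk IV once all data has been processed.
 */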
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;

	if (!nbytes)
		goto out;

	memcpy(param, walk->iv, AES_BLOCK_SIZE);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, param, out, in, n);
		BUG_ON((ret < 0) || (ret != n));

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, sctx->iv, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, sctx->iv, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		=	"cbc(aes)",
	.cra_driver_name	=	"cbc-aes-s390",
	.cra_priority		=	CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		=	CRYPTO_ALG_TYPE_BLKCIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_type		=	&crypto_blkcipher_type,
	.cra_module		=	THIS_MODULE,
	.cra_list		=	LIST_HEAD_INIT(cbc_aes_alg.cra_list),
	.cra_u			=	{
		.blkcipher = {
			.min_keysize		=	AES_MIN_KEY_SIZE,
			.max_keysize		=	AES_MAX_KEY_SIZE,
			.ivsize			=	AES_BLOCK_SIZE,
			.setkey			=	cbc_aes_set_key,
			.encrypt		=	cbc_aes_encrypt,
			.decrypt		=	cbc_aes_decrypt,
		}
	}
};

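/*
 * A minimal sketch of a hypothetical in-kernel user of the modes
 * registered above, assuming the 2.6-era blkcipher API (the names
 * tfm, desc, key, iv, sg and nbytes are illustrative only):
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	crypto_blkcipher_setkey(tfm, key, AES_MIN_KEY_SIZE);
 *	crypto_blkcipher_set_iv(tfm, iv, AES_BLOCK_SIZE);
 *	crypto_blkcipher_encrypt(&desc, sg, sg, nbytes);
 *	crypto_free_blkcipher(tfm);
 */

/*
 * Probe which AES key lengths the hardware supports, then register
 * the single-block cipher and the ECB/CBC block cipher modes.
 */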
static int __init aes_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128) {
		aes_alg.cra_u.cipher.cia_max_keysize = AES_MIN_KEY_SIZE;
		ecb_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
		cbc_aes_alg.cra_u.blkcipher.max_keysize = AES_MIN_KEY_SIZE;
		printk(KERN_INFO
		       "aes_s390: hardware acceleration only available for "
		       "128 bit keys\n");
	}

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

out:
	return ret;

cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_fini(void)
{
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_init);
module_exit(aes_fini);

MODULE_ALIAS("aes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");