xref: /openbmc/linux/drivers/crypto/vmx/aes_xts.c (revision ae213c44)
/**
 * AES XTS routines supporting VMX In-core instructions on Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Leonidas S. Barbosa <leosilva@linux.vnet.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"

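/*
 * Per-transform context: the software fallback skcipher plus the expanded
 * key schedules used by the VSX assembly for data encryption, data
 * decryption, and the XTS tweak.
 */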
struct p8_aes_xts_ctx {
	struct crypto_sync_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
	struct aes_key tweak_key;
};

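/*
 * Allocate a sync skcipher implementing the same algorithm ("xts(aes)")
 * to act as the fallback for contexts where the VSX unit cannot be used.
 */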
static int p8_aes_xts_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_sync_skcipher *fallback;
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	fallback = crypto_alloc_sync_skcipher(alg, 0,
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
			"Failed to allocate transformation for '%s': %ld\n",
			alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_sync_skcipher_set_flags(
		fallback,
		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}

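/* Release the fallback transform allocated in p8_aes_xts_init(). */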
static void p8_aes_xts_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_sync_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

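/*
 * XTS supplies two AES keys back to back: the first half keys the data
 * cipher and the second half keys the tweak cipher.  Key expansion uses
 * the VSX unit, so it runs with preemption and page faults disabled
 * around enable_kernel_vsx().  The full key is also passed to the
 * fallback transform.
 */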
static int p8_aes_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	ret = xts_check_key(tfm, key, keylen);
	if (ret)
		return ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key + keylen/2, (keylen/2) * 8, &ctx->tweak_key);
	ret |= aes_p8_set_encrypt_key(key, (keylen/2) * 8, &ctx->enc_key);
	ret |= aes_p8_set_decrypt_key(key, (keylen/2) * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret |= crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);

	return ret ? -EINVAL : 0;
}

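/*
 * Common encrypt/decrypt path.  When SIMD is not usable in the current
 * context the request is forwarded to the fallback skcipher.  Otherwise
 * the IV is encrypted with the tweak key to produce the initial tweak,
 * and the scatterlists are walked with the VSX assembly doing the bulk
 * XTS work.
 */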
static int p8_aes_xts_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src,
			    unsigned int nbytes, int enc)
{
	int ret;
	u8 tweak[AES_BLOCK_SIZE];
	u8 *iv;
	struct blkcipher_walk walk;
	struct p8_aes_xts_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (!crypto_simd_usable()) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_sync_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = enc ? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
		skcipher_request_zero(req);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);

		ret = blkcipher_walk_virt(desc, &walk);

		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();

		iv = walk.iv;
		memset(tweak, 0, AES_BLOCK_SIZE);
		aes_p8_encrypt(iv, tweak, &ctx->tweak_key);

		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

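		/*
		 * Process each walk chunk in whole AES blocks, enabling
		 * the VSX unit only around the assembly calls.
		 */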
		while ((nbytes = walk.nbytes)) {
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			if (enc)
				aes_p8_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
						nbytes & AES_BLOCK_MASK, &ctx->enc_key, NULL, tweak);
			else
				aes_p8_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
						nbytes & AES_BLOCK_MASK, &ctx->dec_key, NULL, tweak);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
	}
	return ret;
}

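/* Thin wrappers binding the shared routine to encrypt and decrypt. */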
static int p8_aes_xts_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 1);
}

static int p8_aes_xts_decrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	return p8_aes_xts_crypt(desc, dst, src, nbytes, 0);
}

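/*
 * Algorithm descriptor.  The priority of 2000 makes this implementation
 * preferred over the generic xts(aes), and CRYPTO_ALG_NEED_FALLBACK marks
 * it as requiring the software fallback allocated at init time.  Key sizes
 * are doubled because XTS takes two AES keys concatenated.
 */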
struct crypto_alg p8_aes_xts_alg = {
	.cra_name = "xts(aes)",
	.cra_driver_name = "p8_aes_xts",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct p8_aes_xts_ctx),
	.cra_init = p8_aes_xts_init,
	.cra_exit = p8_aes_xts_exit,
	.cra_blkcipher = {
			.ivsize = AES_BLOCK_SIZE,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.setkey = p8_aes_xts_setkey,
			.encrypt = p8_aes_xts_encrypt,
			.decrypt = p8_aes_xts_decrypt,
	}
};