xref: /openbmc/linux/drivers/crypto/vmx/aes_cbc.c (revision f79e4d5f)
/**
 * AES CBC routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"

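/* Per-tfm context: the software fallback plus the expanded VMX key schedules. */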
struct p8_aes_cbc_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
};

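/*
 * Allocate a synchronous software implementation of the same algorithm
 * ("cbc(aes)") to fall back on when the VSX unit cannot be used, e.g.
 * when a request arrives in interrupt context.
 */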
static int p8_aes_cbc_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *fallback;
	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

	fallback = crypto_alloc_skcipher(alg, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_skcipher_set_flags(
		fallback,
		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}

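/* Release the fallback transform allocated in p8_aes_cbc_init(). */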
static void p8_aes_cbc_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

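/*
 * Expand the user key into the VMX encryption and decryption schedules.
 * The aes_p8_* routines use VSX registers, so preemption and page faults
 * are disabled and the VSX unit is enabled around the calls. The fallback
 * tfm is keyed with the same material so either path can serve a request.
 */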
static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	ret += aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret += crypto_skcipher_setkey(ctx->fallback, key, keylen);
	return ret;
}

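/*
 * CBC encryption. The VSX register state must not be touched from
 * interrupt context, so the request is handed to the software fallback
 * there. Otherwise the scatterlists are walked and whole AES blocks
 * (nbytes & AES_BLOCK_MASK) are fed to the aes_p8_cbc_encrypt assembly
 * routine with the encryption key schedule and enc = 1.
 */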
static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	struct blkcipher_walk walk;
	struct p8_aes_cbc_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	} else {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();

		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt(desc, &walk);
		while ((nbytes = walk.nbytes)) {
			aes_p8_cbc_encrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   nbytes & AES_BLOCK_MASK,
					   &ctx->enc_key, walk.iv, 1);
			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}

		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	}

	return ret;
}

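/*
 * CBC decryption. Same structure as the encrypt path, but the decryption
 * key schedule is used and enc = 0 is passed to the assembly routine.
 */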
static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	struct blkcipher_walk walk;
	struct p8_aes_cbc_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = crypto_skcipher_decrypt(req);
		skcipher_request_zero(req);
	} else {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();

		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt(desc, &walk);
		while ((nbytes = walk.nbytes)) {
			aes_p8_cbc_encrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   nbytes & AES_BLOCK_MASK,
					   &ctx->dec_key, walk.iv, 0);
			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}

		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();
	}

	return ret;
}

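/*
 * Algorithm descriptor for the synchronous blkcipher interface. The
 * priority of 2000 places this implementation above the generic software
 * one, and CRYPTO_ALG_NEED_FALLBACK marks it as requiring a fallback so
 * the crypto core never selects it to serve as a fallback itself. The
 * descriptor is registered by the vmx module's init code in this directory.
 */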
struct crypto_alg p8_aes_cbc_alg = {
	.cra_name = "cbc(aes)",
	.cra_driver_name = "p8_aes_cbc",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct p8_aes_cbc_ctx),
	.cra_init = p8_aes_cbc_init,
	.cra_exit = p8_aes_cbc_exit,
	.cra_blkcipher = {
			  .ivsize = AES_BLOCK_SIZE,
			  .min_keysize = AES_MIN_KEY_SIZE,
			  .max_keysize = AES_MAX_KEY_SIZE,
			  .setkey = p8_aes_cbc_setkey,
			  .encrypt = p8_aes_cbc_encrypt,
			  .decrypt = p8_aes_cbc_decrypt,
	},
};
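
/*
 * Usage note (not part of the original driver): a rough sketch of how a
 * kernel consumer might exercise this cipher through the generic skcipher
 * API. On a POWER8 machine with the vmx_crypto module loaded, a plain
 * request for "cbc(aes)" should resolve to p8_aes_cbc because of its
 * priority. The key, buf and buf_len names below are placeholders, and
 * buf_len is assumed to be a multiple of AES_BLOCK_SIZE.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 iv[AES_BLOCK_SIZE];
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, buf_len);
 *	get_random_bytes(iv, sizeof(iv));
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, buf_len, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */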