/* xref: /openbmc/linux/drivers/crypto/vmx/aes_ctr.c (revision 176f011b) */
/**
 * AES CTR routines supporting VMX instructions on the POWER8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"

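/*
 * Per-tfm context: the AES key expanded for the P8 assembly routines,
 * plus a synchronous skcipher fallback for contexts where the VSX unit
 * is unusable.
 */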
struct p8_aes_ctr_ctx {
	struct crypto_sync_skcipher *fallback;
	struct aes_key enc_key;
};

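/* Allocate the software ctr(aes) implementation used as the fallback. */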
static int p8_aes_ctr_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_sync_skcipher *fallback;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	fallback = crypto_alloc_sync_skcipher(alg, 0,
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_sync_skcipher_set_flags(
		fallback,
		crypto_skcipher_get_flags((struct crypto_skcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}

static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_sync_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

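/*
 * Expand the key for the P8 assembly and key the fallback tfm with the
 * same material. enable_kernel_vsx() borrows the vector unit, so its use
 * is bracketed by preempt/pagefault disabling to keep the register state
 * from being clobbered.
 */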
static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	/* aes_p8_set_encrypt_key() takes the key length in bits */
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	/* Key the fallback as well; a nonzero sum reports either failure. */
	ret += crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);
	return ret;
}

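/*
 * Process the trailing partial block: encrypt the counter block once and
 * XOR only walk->nbytes bytes of keystream into the destination.
 */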
static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
			     struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	crypto_xor_cpy(dst, keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

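/*
 * CTR encryption and decryption are the same keystream XOR, so both
 * callbacks point here. In interrupt context the vector unit must not be
 * used, so the request is handed to the software fallback instead.
 */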
static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	u64 inc;
	struct blkcipher_walk walk;
	struct p8_aes_ctr_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_sync_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
						    walk.dst.virt.addr,
						    (nbytes & AES_BLOCK_MASK) /
						    AES_BLOCK_SIZE,
						    &ctx->enc_key,
						    walk.iv);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();

			/*
			 * Advance the IV by the number of full blocks just
			 * processed; the assembly does not write the
			 * incremented counter back to walk.iv.
			 */
			inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
			while (inc--)
				crypto_inc(walk.iv, AES_BLOCK_SIZE);

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
		if (walk.nbytes) {
			p8_aes_ctr_final(ctx, &walk);
			ret = blkcipher_walk_done(desc, &walk, 0);
		}
	}

	return ret;
}

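/*
 * cra_blocksize is 1: CTR mode turns AES into a stream cipher, so
 * requests need not be block-aligned. The high cra_priority makes this
 * driver preferred over lower-priority ctr(aes) implementations.
 */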
struct crypto_alg p8_aes_ctr_alg = {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "p8_aes_ctr",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
	.cra_init = p8_aes_ctr_init,
	.cra_exit = p8_aes_ctr_exit,
	.cra_blkcipher = {
			  .ivsize = AES_BLOCK_SIZE,
			  .min_keysize = AES_MIN_KEY_SIZE,
			  .max_keysize = AES_MAX_KEY_SIZE,
			  .setkey = p8_aes_ctr_setkey,
			  .encrypt = p8_aes_ctr_crypt,
			  .decrypt = p8_aes_ctr_crypt,
	},
};