xref: /openbmc/linux/drivers/crypto/vmx/aes_ctr.c (revision 7f6964c5)
// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES CTR routines supporting VMX instructions on the POWER8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"

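/*
 * Per-tfm context: the expanded AES encryption key used by the P8 assembly
 * routines, plus a software ctr(aes) fallback used whenever the vector
 * unit cannot be used (e.g. from contexts where SIMD is not usable).
 */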
struct p8_aes_ctr_ctx {
	struct crypto_sync_skcipher *fallback;
	struct aes_key enc_key;
};

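/*
 * Allocate the generic ctr(aes) implementation as a fallback and mirror
 * the outer tfm's request flags onto it.
 */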
static int p8_aes_ctr_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_sync_skcipher *fallback;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	fallback = crypto_alloc_sync_skcipher(alg, 0,
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	/*
	 * Propagate the request flags to the fallback. tfm is a plain
	 * crypto_tfm here, so read the flags through the crypto_tfm API
	 * instead of casting to struct crypto_skcipher, whose base member
	 * is not at offset zero.
	 */
	crypto_sync_skcipher_set_flags(fallback,
				       crypto_tfm_get_flags(tfm) &
				       CRYPTO_TFM_REQ_MASK);
	ctx->fallback = fallback;

	return 0;
}

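/*
 * Free the fallback transform allocated in p8_aes_ctr_init().
 */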
static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_sync_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

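/*
 * Expand the key for the P8 assembly routines and also key the fallback.
 * CTR mode only ever encrypts the counter block, so no decryption key
 * schedule is needed; note that .encrypt and .decrypt below both point at
 * p8_aes_ctr_crypt().
 */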
static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	/*
	 * The key expansion runs in VSX registers, so preemption and page
	 * faults must be disabled while the vector unit is enabled for
	 * kernel use.
	 */
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret |= crypto_sync_skcipher_setkey(ctx->fallback, key, keylen);

	/* Collapse a failure from either path into -EINVAL. */
	return ret ? -EINVAL : 0;
}

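/*
 * Handle the final, partial block: encrypt the counter once to produce a
 * full block of keystream, then XOR only the remaining nbytes of it into
 * the destination.
 */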
static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
			     struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	crypto_xor_cpy(dst, keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

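/*
 * Encrypt or decrypt a request. When SIMD is not usable the whole
 * operation is delegated to the fallback; otherwise full blocks are fed
 * to the VSX assembly and any trailing partial block is finished with
 * p8_aes_ctr_final(). CTR encryption and decryption are the same
 * operation, so one routine serves both.
 */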
static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	u64 inc;
	struct blkcipher_walk walk;
	struct p8_aes_ctr_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (!crypto_simd_usable()) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_sync_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
						    walk.dst.virt.addr,
						    (nbytes &
						     AES_BLOCK_MASK) /
						    AES_BLOCK_SIZE,
						    &ctx->enc_key,
						    walk.iv);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();

			/*
			 * aes_p8_ctr32_encrypt_blocks() does not write the
			 * counter back (and, as the name hints, it only
			 * increments the low 32 bits internally), so advance
			 * walk.iv here for the next chunk and for the final
			 * partial block.
			 */
			inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
			while (inc--)
				crypto_inc(walk.iv, AES_BLOCK_SIZE);

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
		if (walk.nbytes) {
			p8_aes_ctr_final(ctx, &walk);
			ret = blkcipher_walk_done(desc, &walk, 0);
		}
	}

	return ret;
}

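/*
 * Algorithm descriptor. cra_blocksize is 1 because CTR turns AES into a
 * stream cipher; the IV carries the full 16-byte counter block. The high
 * cra_priority lets this implementation win over the generic ctr(aes).
 *
 * For reference (a sketch, not part of this file): the vmx module is
 * expected to register this from its init path, roughly
 * crypto_register_alg(&p8_aes_ctr_alg), and unregister it on exit.
 */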
struct crypto_alg p8_aes_ctr_alg = {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "p8_aes_ctr",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
	.cra_init = p8_aes_ctr_init,
	.cra_exit = p8_aes_ctr_exit,
	.cra_blkcipher = {
			  .ivsize = AES_BLOCK_SIZE,
			  .min_keysize = AES_MIN_KEY_SIZE,
			  .max_keysize = AES_MAX_KEY_SIZE,
			  .setkey = p8_aes_ctr_setkey,
			  .encrypt = p8_aes_ctr_crypt,
			  .decrypt = p8_aes_ctr_crypt,
	},
};