xref: /openbmc/linux/arch/arm64/crypto/sm4-ce-glue.c (revision 5b33e0ec)
1*5b33e0ecSTianjia Zhang /* SPDX-License-Identifier: GPL-2.0-or-later */
2*5b33e0ecSTianjia Zhang /*
3*5b33e0ecSTianjia Zhang  * SM4 Cipher Algorithm, using ARMv8 Crypto Extensions
4*5b33e0ecSTianjia Zhang  * as specified in
5*5b33e0ecSTianjia Zhang  * https://tools.ietf.org/id/draft-ribose-cfrg-sm4-10.html
6*5b33e0ecSTianjia Zhang  *
7*5b33e0ecSTianjia Zhang  * Copyright (C) 2022, Alibaba Group.
8*5b33e0ecSTianjia Zhang  * Copyright (C) 2022 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
9*5b33e0ecSTianjia Zhang  */
10*5b33e0ecSTianjia Zhang 
11*5b33e0ecSTianjia Zhang #include <linux/module.h>
12*5b33e0ecSTianjia Zhang #include <linux/crypto.h>
13*5b33e0ecSTianjia Zhang #include <linux/kernel.h>
14*5b33e0ecSTianjia Zhang #include <linux/cpufeature.h>
15*5b33e0ecSTianjia Zhang #include <asm/neon.h>
16*5b33e0ecSTianjia Zhang #include <asm/simd.h>
17*5b33e0ecSTianjia Zhang #include <crypto/internal/simd.h>
18*5b33e0ecSTianjia Zhang #include <crypto/internal/skcipher.h>
19*5b33e0ecSTianjia Zhang #include <crypto/sm4.h>
20*5b33e0ecSTianjia Zhang 
/* Convert a byte count into a whole number of 16-byte SM4 blocks. */
#define BYTES2BLKS(nbytes)	((nbytes) >> 4)

/*
 * Assembly routines built on the ARMv8 SM4 Crypto Extensions.  They all
 * execute NEON/CE instructions, so every call must be made between
 * kernel_neon_begin() and kernel_neon_end().
 */

/* Expand a raw key into encryption and decryption round-key schedules. */
asmlinkage void sm4_ce_expand_key(const u8 *key, u32 *rkey_enc, u32 *rkey_dec,
				  const u32 *fk, const u32 *ck);
/* Process a single 16-byte block with the given round keys. */
asmlinkage void sm4_ce_crypt_block(const u32 *rkey, u8 *dst, const u8 *src);
/* ECB: process nblks full blocks. */
asmlinkage void sm4_ce_crypt(const u32 *rkey, u8 *dst, const u8 *src,
			     unsigned int nblks);
/* CBC: process nblks full blocks, updating iv in place. */
asmlinkage void sm4_ce_cbc_enc(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblks);
asmlinkage void sm4_ce_cbc_dec(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblks);
/* CFB: process nblks full blocks, updating iv in place. */
asmlinkage void sm4_ce_cfb_enc(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblks);
asmlinkage void sm4_ce_cfb_dec(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblks);
/* CTR: process nblks full blocks, incrementing the counter iv in place. */
asmlinkage void sm4_ce_ctr_enc(const u32 *rkey, u8 *dst, const u8 *src,
			       u8 *iv, unsigned int nblks);
38*5b33e0ecSTianjia Zhang 
39*5b33e0ecSTianjia Zhang static int sm4_setkey(struct crypto_skcipher *tfm, const u8 *key,
40*5b33e0ecSTianjia Zhang 		      unsigned int key_len)
41*5b33e0ecSTianjia Zhang {
42*5b33e0ecSTianjia Zhang 	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
43*5b33e0ecSTianjia Zhang 
44*5b33e0ecSTianjia Zhang 	if (key_len != SM4_KEY_SIZE)
45*5b33e0ecSTianjia Zhang 		return -EINVAL;
46*5b33e0ecSTianjia Zhang 
47*5b33e0ecSTianjia Zhang 	sm4_ce_expand_key(key, ctx->rkey_enc, ctx->rkey_dec,
48*5b33e0ecSTianjia Zhang 			  crypto_sm4_fk, crypto_sm4_ck);
49*5b33e0ecSTianjia Zhang 	return 0;
50*5b33e0ecSTianjia Zhang }
51*5b33e0ecSTianjia Zhang 
52*5b33e0ecSTianjia Zhang static int sm4_ecb_do_crypt(struct skcipher_request *req, const u32 *rkey)
53*5b33e0ecSTianjia Zhang {
54*5b33e0ecSTianjia Zhang 	struct skcipher_walk walk;
55*5b33e0ecSTianjia Zhang 	unsigned int nbytes;
56*5b33e0ecSTianjia Zhang 	int err;
57*5b33e0ecSTianjia Zhang 
58*5b33e0ecSTianjia Zhang 	err = skcipher_walk_virt(&walk, req, false);
59*5b33e0ecSTianjia Zhang 
60*5b33e0ecSTianjia Zhang 	while ((nbytes = walk.nbytes) > 0) {
61*5b33e0ecSTianjia Zhang 		const u8 *src = walk.src.virt.addr;
62*5b33e0ecSTianjia Zhang 		u8 *dst = walk.dst.virt.addr;
63*5b33e0ecSTianjia Zhang 		unsigned int nblks;
64*5b33e0ecSTianjia Zhang 
65*5b33e0ecSTianjia Zhang 		kernel_neon_begin();
66*5b33e0ecSTianjia Zhang 
67*5b33e0ecSTianjia Zhang 		nblks = BYTES2BLKS(nbytes);
68*5b33e0ecSTianjia Zhang 		if (nblks) {
69*5b33e0ecSTianjia Zhang 			sm4_ce_crypt(rkey, dst, src, nblks);
70*5b33e0ecSTianjia Zhang 			nbytes -= nblks * SM4_BLOCK_SIZE;
71*5b33e0ecSTianjia Zhang 		}
72*5b33e0ecSTianjia Zhang 
73*5b33e0ecSTianjia Zhang 		kernel_neon_end();
74*5b33e0ecSTianjia Zhang 
75*5b33e0ecSTianjia Zhang 		err = skcipher_walk_done(&walk, nbytes);
76*5b33e0ecSTianjia Zhang 	}
77*5b33e0ecSTianjia Zhang 
78*5b33e0ecSTianjia Zhang 	return err;
79*5b33e0ecSTianjia Zhang }
80*5b33e0ecSTianjia Zhang 
81*5b33e0ecSTianjia Zhang static int sm4_ecb_encrypt(struct skcipher_request *req)
82*5b33e0ecSTianjia Zhang {
83*5b33e0ecSTianjia Zhang 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
84*5b33e0ecSTianjia Zhang 	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
85*5b33e0ecSTianjia Zhang 
86*5b33e0ecSTianjia Zhang 	return sm4_ecb_do_crypt(req, ctx->rkey_enc);
87*5b33e0ecSTianjia Zhang }
88*5b33e0ecSTianjia Zhang 
89*5b33e0ecSTianjia Zhang static int sm4_ecb_decrypt(struct skcipher_request *req)
90*5b33e0ecSTianjia Zhang {
91*5b33e0ecSTianjia Zhang 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
92*5b33e0ecSTianjia Zhang 	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
93*5b33e0ecSTianjia Zhang 
94*5b33e0ecSTianjia Zhang 	return sm4_ecb_do_crypt(req, ctx->rkey_dec);
95*5b33e0ecSTianjia Zhang }
96*5b33e0ecSTianjia Zhang 
97*5b33e0ecSTianjia Zhang static int sm4_cbc_encrypt(struct skcipher_request *req)
98*5b33e0ecSTianjia Zhang {
99*5b33e0ecSTianjia Zhang 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
100*5b33e0ecSTianjia Zhang 	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
101*5b33e0ecSTianjia Zhang 	struct skcipher_walk walk;
102*5b33e0ecSTianjia Zhang 	unsigned int nbytes;
103*5b33e0ecSTianjia Zhang 	int err;
104*5b33e0ecSTianjia Zhang 
105*5b33e0ecSTianjia Zhang 	err = skcipher_walk_virt(&walk, req, false);
106*5b33e0ecSTianjia Zhang 
107*5b33e0ecSTianjia Zhang 	while ((nbytes = walk.nbytes) > 0) {
108*5b33e0ecSTianjia Zhang 		const u8 *src = walk.src.virt.addr;
109*5b33e0ecSTianjia Zhang 		u8 *dst = walk.dst.virt.addr;
110*5b33e0ecSTianjia Zhang 		unsigned int nblks;
111*5b33e0ecSTianjia Zhang 
112*5b33e0ecSTianjia Zhang 		kernel_neon_begin();
113*5b33e0ecSTianjia Zhang 
114*5b33e0ecSTianjia Zhang 		nblks = BYTES2BLKS(nbytes);
115*5b33e0ecSTianjia Zhang 		if (nblks) {
116*5b33e0ecSTianjia Zhang 			sm4_ce_cbc_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
117*5b33e0ecSTianjia Zhang 			nbytes -= nblks * SM4_BLOCK_SIZE;
118*5b33e0ecSTianjia Zhang 		}
119*5b33e0ecSTianjia Zhang 
120*5b33e0ecSTianjia Zhang 		kernel_neon_end();
121*5b33e0ecSTianjia Zhang 
122*5b33e0ecSTianjia Zhang 		err = skcipher_walk_done(&walk, nbytes);
123*5b33e0ecSTianjia Zhang 	}
124*5b33e0ecSTianjia Zhang 
125*5b33e0ecSTianjia Zhang 	return err;
126*5b33e0ecSTianjia Zhang }
127*5b33e0ecSTianjia Zhang 
128*5b33e0ecSTianjia Zhang static int sm4_cbc_decrypt(struct skcipher_request *req)
129*5b33e0ecSTianjia Zhang {
130*5b33e0ecSTianjia Zhang 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
131*5b33e0ecSTianjia Zhang 	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
132*5b33e0ecSTianjia Zhang 	struct skcipher_walk walk;
133*5b33e0ecSTianjia Zhang 	unsigned int nbytes;
134*5b33e0ecSTianjia Zhang 	int err;
135*5b33e0ecSTianjia Zhang 
136*5b33e0ecSTianjia Zhang 	err = skcipher_walk_virt(&walk, req, false);
137*5b33e0ecSTianjia Zhang 
138*5b33e0ecSTianjia Zhang 	while ((nbytes = walk.nbytes) > 0) {
139*5b33e0ecSTianjia Zhang 		const u8 *src = walk.src.virt.addr;
140*5b33e0ecSTianjia Zhang 		u8 *dst = walk.dst.virt.addr;
141*5b33e0ecSTianjia Zhang 		unsigned int nblks;
142*5b33e0ecSTianjia Zhang 
143*5b33e0ecSTianjia Zhang 		kernel_neon_begin();
144*5b33e0ecSTianjia Zhang 
145*5b33e0ecSTianjia Zhang 		nblks = BYTES2BLKS(nbytes);
146*5b33e0ecSTianjia Zhang 		if (nblks) {
147*5b33e0ecSTianjia Zhang 			sm4_ce_cbc_dec(ctx->rkey_dec, dst, src, walk.iv, nblks);
148*5b33e0ecSTianjia Zhang 			nbytes -= nblks * SM4_BLOCK_SIZE;
149*5b33e0ecSTianjia Zhang 		}
150*5b33e0ecSTianjia Zhang 
151*5b33e0ecSTianjia Zhang 		kernel_neon_end();
152*5b33e0ecSTianjia Zhang 
153*5b33e0ecSTianjia Zhang 		err = skcipher_walk_done(&walk, nbytes);
154*5b33e0ecSTianjia Zhang 	}
155*5b33e0ecSTianjia Zhang 
156*5b33e0ecSTianjia Zhang 	return err;
157*5b33e0ecSTianjia Zhang }
158*5b33e0ecSTianjia Zhang 
/*
 * CFB encryption.  Full blocks go through the bulk NEON routine; a
 * trailing partial block — only possible on the final walk step, when
 * walk.nbytes == walk.total — is XORed against one freshly generated
 * keystream block.
 */
static int sm4_cfb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	/* One iteration per contiguous segment of the request. */
	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblks;

		kernel_neon_begin();

		nblks = BYTES2BLKS(nbytes);
		if (nblks) {
			/* Bulk-process full blocks; iv is updated in place. */
			sm4_ce_cfb_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
			/* Step past the bytes just consumed. */
			dst += nblks * SM4_BLOCK_SIZE;
			src += nblks * SM4_BLOCK_SIZE;
			nbytes -= nblks * SM4_BLOCK_SIZE;
		}

		/* tail */
		if (walk.nbytes == walk.total && nbytes > 0) {
			u8 keystream[SM4_BLOCK_SIZE];

			/* Encrypt the IV to produce one keystream block. */
			sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
			crypto_xor_cpy(dst, src, keystream, nbytes);
			nbytes = 0;
		}

		kernel_neon_end();

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
200*5b33e0ecSTianjia Zhang 
/*
 * CFB decryption.  Mirrors sm4_cfb_encrypt(): note that CFB uses the
 * ENCRYPTION round keys (rkey_enc) in both directions, since the block
 * cipher is only ever run forward to generate keystream.
 */
static int sm4_cfb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	/* One iteration per contiguous segment of the request. */
	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblks;

		kernel_neon_begin();

		nblks = BYTES2BLKS(nbytes);
		if (nblks) {
			/* Bulk-process full blocks; iv is updated in place. */
			sm4_ce_cfb_dec(ctx->rkey_enc, dst, src, walk.iv, nblks);
			/* Step past the bytes just consumed. */
			dst += nblks * SM4_BLOCK_SIZE;
			src += nblks * SM4_BLOCK_SIZE;
			nbytes -= nblks * SM4_BLOCK_SIZE;
		}

		/* tail: partial final block, only on the last walk step */
		if (walk.nbytes == walk.total && nbytes > 0) {
			u8 keystream[SM4_BLOCK_SIZE];

			/* Encrypt the IV to produce one keystream block. */
			sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
			crypto_xor_cpy(dst, src, keystream, nbytes);
			nbytes = 0;
		}

		kernel_neon_end();

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
242*5b33e0ecSTianjia Zhang 
/*
 * CTR mode.  Encryption and decryption are the same operation (XOR with
 * an encrypted counter stream), so this single handler serves both.
 * Full blocks go through the bulk NEON routine, which advances the
 * counter in walk.iv; a trailing partial block uses one extra keystream
 * block generated here.
 */
static int sm4_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct sm4_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	/* One iteration per contiguous segment of the request. */
	while ((nbytes = walk.nbytes) > 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int nblks;

		kernel_neon_begin();

		nblks = BYTES2BLKS(nbytes);
		if (nblks) {
			/* Bulk-process full blocks; counter iv advances. */
			sm4_ce_ctr_enc(ctx->rkey_enc, dst, src, walk.iv, nblks);
			/* Step past the bytes just consumed. */
			dst += nblks * SM4_BLOCK_SIZE;
			src += nblks * SM4_BLOCK_SIZE;
			nbytes -= nblks * SM4_BLOCK_SIZE;
		}

		/* tail: partial final block, only on the last walk step */
		if (walk.nbytes == walk.total && nbytes > 0) {
			u8 keystream[SM4_BLOCK_SIZE];

			/* Encrypt the current counter for keystream ... */
			sm4_ce_crypt_block(ctx->rkey_enc, keystream, walk.iv);
			/* ... and advance it, as a full block was consumed. */
			crypto_inc(walk.iv, SM4_BLOCK_SIZE);
			crypto_xor_cpy(dst, src, keystream, nbytes);
			nbytes = 0;
		}

		kernel_neon_end();

		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
285*5b33e0ecSTianjia Zhang 
/*
 * skcipher algorithm definitions registered with the crypto API.
 * Priority 400 ranks these hardware-accelerated implementations above
 * generic software SM4.  The stream-like modes (CFB, CTR) advertise
 * cra_blocksize = 1 since they accept arbitrary-length input, with
 * chunksize recording the underlying 16-byte keystream granularity.
 */
static struct skcipher_alg sm4_algs[] = {
	{
		/* ECB: block-at-a-time, no IV. */
		.base = {
			.cra_name		= "ecb(sm4)",
			.cra_driver_name	= "ecb-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_ecb_encrypt,
		.decrypt	= sm4_ecb_decrypt,
	}, {
		/* CBC: block-aligned input, one block of IV. */
		.base = {
			.cra_name		= "cbc(sm4)",
			.cra_driver_name	= "cbc-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= SM4_BLOCK_SIZE,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_cbc_encrypt,
		.decrypt	= sm4_cbc_decrypt,
	}, {
		/* CFB: stream mode (blocksize 1), one block of IV. */
		.base = {
			.cra_name		= "cfb(sm4)",
			.cra_driver_name	= "cfb-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= 1,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.chunksize	= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_cfb_encrypt,
		.decrypt	= sm4_cfb_decrypt,
	}, {
		/* CTR: stream mode (blocksize 1), counter passed as IV. */
		.base = {
			.cra_name		= "ctr(sm4)",
			.cra_driver_name	= "ctr-sm4-ce",
			.cra_priority		= 400,
			.cra_blocksize		= 1,
			.cra_ctxsize		= sizeof(struct sm4_ctx),
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= SM4_KEY_SIZE,
		.max_keysize	= SM4_KEY_SIZE,
		.ivsize		= SM4_BLOCK_SIZE,
		.chunksize	= SM4_BLOCK_SIZE,
		.setkey		= sm4_setkey,
		.encrypt	= sm4_ctr_crypt,
		.decrypt	= sm4_ctr_crypt,
	}
};
350*5b33e0ecSTianjia Zhang 
/* Module init: register all four SM4 modes with the crypto API. */
static int __init sm4_init(void)
{
	return crypto_register_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}
355*5b33e0ecSTianjia Zhang 
/* Module exit: unregister everything registered in sm4_init(). */
static void __exit sm4_exit(void)
{
	crypto_unregister_skciphers(sm4_algs, ARRAY_SIZE(sm4_algs));
}
360*5b33e0ecSTianjia Zhang 
/* Only load on CPUs that advertise the SM4 crypto-extension feature. */
module_cpu_feature_match(SM4, sm4_init);
module_exit(sm4_exit);

MODULE_DESCRIPTION("SM4 ECB/CBC/CFB/CTR using ARMv8 Crypto Extensions");
MODULE_ALIAS_CRYPTO("sm4-ce");
MODULE_ALIAS_CRYPTO("sm4");
MODULE_ALIAS_CRYPTO("ecb(sm4)");
MODULE_ALIAS_CRYPTO("cbc(sm4)");
MODULE_ALIAS_CRYPTO("cfb(sm4)");
MODULE_ALIAS_CRYPTO("ctr(sm4)");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_LICENSE("GPL v2");
373