/*
 * Shared glue code for 128bit block ciphers, AVX2 assembler macros
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

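/*
 * Load 16 consecutive 128-bit blocks from 'src' into the eight ymm
 * registers x0..x7 (two blocks per register), using unaligned loads.
 */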
#define load_16way(src, x0, x1, x2, x3, x4, x5, x6, x7) \
	vmovdqu (0*32)(src), x0; \
	vmovdqu (1*32)(src), x1; \
	vmovdqu (2*32)(src), x2; \
	vmovdqu (3*32)(src), x3; \
	vmovdqu (4*32)(src), x4; \
	vmovdqu (5*32)(src), x5; \
	vmovdqu (6*32)(src), x6; \
	vmovdqu (7*32)(src), x7;

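/*
 * Store the 16 blocks held in x0..x7 back to memory at 'dst', using
 * unaligned stores.
 */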
#define store_16way(dst, x0, x1, x2, x3, x4, x5, x6, x7) \
	vmovdqu x0, (0*32)(dst); \
	vmovdqu x1, (1*32)(dst); \
	vmovdqu x2, (2*32)(dst); \
	vmovdqu x3, (3*32)(dst); \
	vmovdqu x4, (4*32)(dst); \
	vmovdqu x5, (5*32)(dst); \
	vmovdqu x6, (6*32)(dst); \
	vmovdqu x7, (7*32)(dst);

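/*
 * CBC decryption output: xor each decrypted block with the preceding
 * ciphertext block from 'src' and store the result. The very first
 * block is xored with zero here; its xor with the IV is expected to be
 * handled by the caller.
 */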
#define store_cbc_16way(src, dst, x0, x1, x2, x3, x4, x5, x6, x7, t0) \
	vpxor t0, t0, t0; \
	vinserti128 $1, (src), t0, t0; \
	vpxor t0, x0, x0; \
	vpxor (0*32+16)(src), x1, x1; \
	vpxor (1*32+16)(src), x2, x2; \
	vpxor (2*32+16)(src), x3, x3; \
	vpxor (3*32+16)(src), x4, x4; \
	vpxor (4*32+16)(src), x5, x5; \
	vpxor (5*32+16)(src), x6, x6; \
	vpxor (6*32+16)(src), x7, x7; \
	store_16way(dst, x0, x1, x2, x3, x4, x5, x6, x7);

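/*
 * Increment the 128-bit little-endian value in 'x' by one, propagating
 * the carry from the low into the high 64-bit half. 'minus_one' must
 * hold -1 in the low and 0 in the high 64 bits of each lane.
 */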
#define inc_le128(x, minus_one, tmp) \
	vpcmpeqq minus_one, x, tmp; \
	vpsubq minus_one, x, x; \
	vpslldq $8, tmp, tmp; \
	vpsubq tmp, x, x;

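/*
 * Add two to each 128-bit little-endian counter lane of the ymm
 * register 'x', carrying into the high 64-bit half when the low half
 * was -1 or -2. 'minus_one' and 'minus_two' hold -1 and -2 in the low
 * 64 bits of each lane, with the high 64 bits zero.
 */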
#define add2_le128(x, minus_one, minus_two, tmp1, tmp2) \
	vpcmpeqq minus_one, x, tmp1; \
	vpcmpeqq minus_two, x, tmp2; \
	vpsubq minus_two, x, x; \
	vpor tmp2, tmp1, tmp1; \
	vpslldq $8, tmp1, tmp1; \
	vpsubq tmp1, x, x;

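/*
 * Build 16 CTR-mode counter blocks in x0..x7 from the little-endian
 * counter at (iv): increment with inc_le128()/add2_le128() and pass
 * each pair of counters through the 'bswap' shuffle mask to produce
 * the big-endian blocks. The counter for the next call is written
 * back to (iv).
 */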
#define load_ctr_16way(iv, bswap, x0, x1, x2, x3, x4, x5, x6, x7, t0, t0x, t1, \
		       t1x, t2, t2x, t3, t3x, t4, t5) \
	vpcmpeqd t0, t0, t0; \
	vpsrldq $8, t0, t0; /* ab: -1:0 ; cd: -1:0 */ \
	vpaddq t0, t0, t4; /* ab: -2:0 ; cd: -2:0 */\
	\
	/* load IV and byteswap */ \
	vmovdqu (iv), t2x; \
	vmovdqa t2x, t3x; \
	inc_le128(t2x, t0x, t1x); \
	vbroadcasti128 bswap, t1; \
	vinserti128 $1, t2x, t3, t2; /* ab: le0 ; cd: le1 */ \
	vpshufb t1, t2, x0; \
	\
	/* construct IVs */ \
	add2_le128(t2, t0, t4, t3, t5); /* ab: le2 ; cd: le3 */ \
	vpshufb t1, t2, x1; \
	add2_le128(t2, t0, t4, t3, t5); \
	vpshufb t1, t2, x2; \
	add2_le128(t2, t0, t4, t3, t5); \
	vpshufb t1, t2, x3; \
	add2_le128(t2, t0, t4, t3, t5); \
	vpshufb t1, t2, x4; \
	add2_le128(t2, t0, t4, t3, t5); \
	vpshufb t1, t2, x5; \
	add2_le128(t2, t0, t4, t3, t5); \
	vpshufb t1, t2, x6; \
	add2_le128(t2, t0, t4, t3, t5); \
	vpshufb t1, t2, x7; \
	vextracti128 $1, t2, t2x; \
	inc_le128(t2x, t0x, t3x); \
	vmovdqu t2x, (iv);

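/*
 * CTR-mode finalization: xor the encrypted counter blocks in x0..x7
 * with the source data and store the result to 'dst'.
 */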
#define store_ctr_16way(src, dst, x0, x1, x2, x3, x4, x5, x6, x7) \
	vpxor (0*32)(src), x0, x0; \
	vpxor (1*32)(src), x1, x1; \
	vpxor (2*32)(src), x2, x2; \
	vpxor (3*32)(src), x3, x3; \
	vpxor (4*32)(src), x4, x4; \
	vpxor (5*32)(src), x5, x5; \
	vpxor (6*32)(src), x6, x6; \
	vpxor (7*32)(src), x7, x7; \
	store_16way(dst, x0, x1, x2, x3, x4, x5, x6, x7);

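/*
 * Multiply the 128-bit XTS tweak in 'iv' by x in GF(2^128), using the
 * little-endian block convention: double the value and use 'mask' to
 * inject the carry across the 64-bit halves and the reduction constant
 * when the top bit falls out.
 */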
#define gf128mul_x_ble(iv, mask, tmp) \
	vpsrad $31, iv, tmp; \
	vpaddq iv, iv, iv; \
	vpshufd $0x13, tmp, tmp; \
	vpand mask, tmp, tmp; \
	vpxor tmp, iv, iv;

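/*
 * Like gf128mul_x_ble(), but multiply the tweak by x^2 in one step:
 * 'mask1' supplies the carry/reduction bits for the single shift and
 * 'mask2' those for the double shift.
 */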
#define gf128mul_x2_ble(iv, mask1, mask2, tmp0, tmp1) \
	vpsrad $31, iv, tmp0; \
	vpaddq iv, iv, tmp1; \
	vpsllq $2, iv, iv; \
	vpshufd $0x13, tmp0, tmp0; \
	vpsrad $31, tmp1, tmp1; \
	vpand mask2, tmp0, tmp0; \
	vpshufd $0x13, tmp1, tmp1; \
	vpxor tmp0, iv, iv; \
	vpand mask1, tmp1, tmp1; \
	vpxor tmp1, iv, iv;

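/*
 * XTS input processing: derive one tweak per block starting from the
 * tweak at (iv), xor the tweaks with the 16 source blocks into x0..x7
 * and stash the tweaks at 'dst' so that store_xts_16way() can apply
 * them again to the cipher output. The tweak for the next call is
 * written back to (iv).
 */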
#define load_xts_16way(iv, src, dst, x0, x1, x2, x3, x4, x5, x6, x7, tiv, \
		       tivx, t0, t0x, t1, t1x, t2, t2x, t3, \
		       xts_gf128mul_and_shl1_mask_0, \
		       xts_gf128mul_and_shl1_mask_1) \
	vbroadcasti128 xts_gf128mul_and_shl1_mask_0, t1; \
	\
	/* load IV and construct second IV */ \
	vmovdqu (iv), tivx; \
	vmovdqa tivx, t0x; \
	gf128mul_x_ble(tivx, t1x, t2x); \
	vbroadcasti128 xts_gf128mul_and_shl1_mask_1, t2; \
	vinserti128 $1, tivx, t0, tiv; \
	vpxor (0*32)(src), tiv, x0; \
	vmovdqu tiv, (0*32)(dst); \
	\
	/* construct and store IVs, also xor with source */ \
	gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
	vpxor (1*32)(src), tiv, x1; \
	vmovdqu tiv, (1*32)(dst); \
	\
	gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
	vpxor (2*32)(src), tiv, x2; \
	vmovdqu tiv, (2*32)(dst); \
	\
	gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
	vpxor (3*32)(src), tiv, x3; \
	vmovdqu tiv, (3*32)(dst); \
	\
	gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
	vpxor (4*32)(src), tiv, x4; \
	vmovdqu tiv, (4*32)(dst); \
	\
	gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
	vpxor (5*32)(src), tiv, x5; \
	vmovdqu tiv, (5*32)(dst); \
	\
	gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
	vpxor (6*32)(src), tiv, x6; \
	vmovdqu tiv, (6*32)(dst); \
	\
	gf128mul_x2_ble(tiv, t1, t2, t0, t3); \
	vpxor (7*32)(src), tiv, x7; \
	vmovdqu tiv, (7*32)(dst); \
	\
	vextracti128 $1, tiv, tivx; \
	gf128mul_x_ble(tivx, t1x, t2x); \
	vmovdqu tivx, (iv);

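/*
 * XTS output processing: xor the cipher output in x0..x7 with the
 * tweaks previously stored at 'dst' by load_xts_16way() and store the
 * final blocks over them.
 */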
#define store_xts_16way(dst, x0, x1, x2, x3, x4, x5, x6, x7) \
	vpxor (0*32)(dst), x0, x0; \
	vpxor (1*32)(dst), x1, x1; \
	vpxor (2*32)(dst), x2, x2; \
	vpxor (3*32)(dst), x3, x3; \
	vpxor (4*32)(dst), x4, x4; \
	vpxor (5*32)(dst), x5, x5; \
	vpxor (6*32)(dst), x6, x6; \
	vpxor (7*32)(dst), x7, x7; \
	store_16way(dst, x0, x1, x2, x3, x4, x5, x6, x7);