1d8b167f9SMatt Fleming /*
2d8b167f9SMatt Fleming  * This file contains instructions for testing by the test titled:
3d8b167f9SMatt Fleming  *
4d8b167f9SMatt Fleming  *         "Test x86 instruction decoder - new instructions"
5d8b167f9SMatt Fleming  *
6d8b167f9SMatt Fleming  * Note that the 'Expecting' comment lines are consumed by the
7d8b167f9SMatt Fleming  * gen-insn-x86-dat.awk script and have the format:
8d8b167f9SMatt Fleming  *
9d8b167f9SMatt Fleming  *         Expecting: <op> <branch> <rel>
10d8b167f9SMatt Fleming  *
11d8b167f9SMatt Fleming  * If this file is changed, remember to run the gen-insn-x86-dat.sh
12d8b167f9SMatt Fleming  * script and commit the result.
13d8b167f9SMatt Fleming  *
14d8b167f9SMatt Fleming  * Refer to insn-x86.c for more details.
15d8b167f9SMatt Fleming  */
16d8b167f9SMatt Fleming 
17d8b167f9SMatt Fleming int main(void)
18d8b167f9SMatt Fleming {
19d8b167f9SMatt Fleming 	/* Following line is a marker for the awk script - do not change */
20d8b167f9SMatt Fleming 	asm volatile("rdtsc"); /* Start here */
21d8b167f9SMatt Fleming 
226f6ef07fSAdrian Hunter 	/* Test fix for vcvtph2ps in x86-opcode-map.txt */
236f6ef07fSAdrian Hunter 
246f6ef07fSAdrian Hunter 	asm volatile("vcvtph2ps %xmm3,%ymm5");
256f6ef07fSAdrian Hunter 
26d8b167f9SMatt Fleming #ifdef __x86_64__
27d8b167f9SMatt Fleming 
28d8b167f9SMatt Fleming 	/* bndmk m64, bnd */
29d8b167f9SMatt Fleming 
30d8b167f9SMatt Fleming 	asm volatile("bndmk (%rax), %bnd0");
31d8b167f9SMatt Fleming 	asm volatile("bndmk (%r8), %bnd0");
32d8b167f9SMatt Fleming 	asm volatile("bndmk (0x12345678), %bnd0");
33d8b167f9SMatt Fleming 	asm volatile("bndmk (%rax), %bnd3");
34d8b167f9SMatt Fleming 	asm volatile("bndmk (%rcx,%rax,1), %bnd0");
35d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(,%rax,1), %bnd0");
36d8b167f9SMatt Fleming 	asm volatile("bndmk (%rax,%rcx,1), %bnd0");
37d8b167f9SMatt Fleming 	asm volatile("bndmk (%rax,%rcx,8), %bnd0");
38d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rax), %bnd0");
39d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rbp), %bnd0");
40d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rcx,%rax,1), %bnd0");
41d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rbp,%rax,1), %bnd0");
42d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rax,%rcx,1), %bnd0");
43d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rax,%rcx,8), %bnd0");
44d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rax), %bnd0");
45d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rbp), %bnd0");
46d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rcx,%rax,1), %bnd0");
47d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rbp,%rax,1), %bnd0");
48d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rax,%rcx,1), %bnd0");
49d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rax,%rcx,8), %bnd0");
50d8b167f9SMatt Fleming 
51d8b167f9SMatt Fleming 	/* bndcl r/m64, bnd */
52d8b167f9SMatt Fleming 
53d8b167f9SMatt Fleming 	asm volatile("bndcl (%rax), %bnd0");
54d8b167f9SMatt Fleming 	asm volatile("bndcl (%r8), %bnd0");
55d8b167f9SMatt Fleming 	asm volatile("bndcl (0x12345678), %bnd0");
56d8b167f9SMatt Fleming 	asm volatile("bndcl (%rax), %bnd3");
57d8b167f9SMatt Fleming 	asm volatile("bndcl (%rcx,%rax,1), %bnd0");
58d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(,%rax,1), %bnd0");
59d8b167f9SMatt Fleming 	asm volatile("bndcl (%rax,%rcx,1), %bnd0");
60d8b167f9SMatt Fleming 	asm volatile("bndcl (%rax,%rcx,8), %bnd0");
61d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rax), %bnd0");
62d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rbp), %bnd0");
63d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rcx,%rax,1), %bnd0");
64d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rbp,%rax,1), %bnd0");
65d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rax,%rcx,1), %bnd0");
66d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rax,%rcx,8), %bnd0");
67d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rax), %bnd0");
68d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rbp), %bnd0");
69d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rcx,%rax,1), %bnd0");
70d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rbp,%rax,1), %bnd0");
71d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rax,%rcx,1), %bnd0");
72d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rax,%rcx,8), %bnd0");
73d8b167f9SMatt Fleming 	asm volatile("bndcl %rax, %bnd0");
74d8b167f9SMatt Fleming 
75d8b167f9SMatt Fleming 	/* bndcu r/m64, bnd */
76d8b167f9SMatt Fleming 
77d8b167f9SMatt Fleming 	asm volatile("bndcu (%rax), %bnd0");
78d8b167f9SMatt Fleming 	asm volatile("bndcu (%r8), %bnd0");
79d8b167f9SMatt Fleming 	asm volatile("bndcu (0x12345678), %bnd0");
80d8b167f9SMatt Fleming 	asm volatile("bndcu (%rax), %bnd3");
81d8b167f9SMatt Fleming 	asm volatile("bndcu (%rcx,%rax,1), %bnd0");
82d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(,%rax,1), %bnd0");
83d8b167f9SMatt Fleming 	asm volatile("bndcu (%rax,%rcx,1), %bnd0");
84d8b167f9SMatt Fleming 	asm volatile("bndcu (%rax,%rcx,8), %bnd0");
85d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rax), %bnd0");
86d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rbp), %bnd0");
87d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rcx,%rax,1), %bnd0");
88d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rbp,%rax,1), %bnd0");
89d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rax,%rcx,1), %bnd0");
90d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rax,%rcx,8), %bnd0");
91d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rax), %bnd0");
92d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rbp), %bnd0");
93d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rcx,%rax,1), %bnd0");
94d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rbp,%rax,1), %bnd0");
95d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rax,%rcx,1), %bnd0");
96d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rax,%rcx,8), %bnd0");
97d8b167f9SMatt Fleming 	asm volatile("bndcu %rax, %bnd0");
98d8b167f9SMatt Fleming 
99d8b167f9SMatt Fleming 	/* bndcn r/m64, bnd */
100d8b167f9SMatt Fleming 
101d8b167f9SMatt Fleming 	asm volatile("bndcn (%rax), %bnd0");
102d8b167f9SMatt Fleming 	asm volatile("bndcn (%r8), %bnd0");
103d8b167f9SMatt Fleming 	asm volatile("bndcn (0x12345678), %bnd0");
104d8b167f9SMatt Fleming 	asm volatile("bndcn (%rax), %bnd3");
105d8b167f9SMatt Fleming 	asm volatile("bndcn (%rcx,%rax,1), %bnd0");
106d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(,%rax,1), %bnd0");
107d8b167f9SMatt Fleming 	asm volatile("bndcn (%rax,%rcx,1), %bnd0");
108d8b167f9SMatt Fleming 	asm volatile("bndcn (%rax,%rcx,8), %bnd0");
109d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rax), %bnd0");
110d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rbp), %bnd0");
111d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rcx,%rax,1), %bnd0");
112d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rbp,%rax,1), %bnd0");
113d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rax,%rcx,1), %bnd0");
114d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rax,%rcx,8), %bnd0");
115d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rax), %bnd0");
116d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rbp), %bnd0");
117d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rcx,%rax,1), %bnd0");
118d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rbp,%rax,1), %bnd0");
119d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rax,%rcx,1), %bnd0");
120d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rax,%rcx,8), %bnd0");
121d8b167f9SMatt Fleming 	asm volatile("bndcn %rax, %bnd0");
122d8b167f9SMatt Fleming 
123d8b167f9SMatt Fleming 	/* bndmov m128, bnd */
124d8b167f9SMatt Fleming 
125d8b167f9SMatt Fleming 	asm volatile("bndmov (%rax), %bnd0");
126d8b167f9SMatt Fleming 	asm volatile("bndmov (%r8), %bnd0");
127d8b167f9SMatt Fleming 	asm volatile("bndmov (0x12345678), %bnd0");
128d8b167f9SMatt Fleming 	asm volatile("bndmov (%rax), %bnd3");
129d8b167f9SMatt Fleming 	asm volatile("bndmov (%rcx,%rax,1), %bnd0");
130d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(,%rax,1), %bnd0");
131d8b167f9SMatt Fleming 	asm volatile("bndmov (%rax,%rcx,1), %bnd0");
132d8b167f9SMatt Fleming 	asm volatile("bndmov (%rax,%rcx,8), %bnd0");
133d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rax), %bnd0");
134d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rbp), %bnd0");
135d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rcx,%rax,1), %bnd0");
136d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rbp,%rax,1), %bnd0");
137d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rax,%rcx,1), %bnd0");
138d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rax,%rcx,8), %bnd0");
139d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rax), %bnd0");
140d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rbp), %bnd0");
141d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rcx,%rax,1), %bnd0");
142d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rbp,%rax,1), %bnd0");
143d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rax,%rcx,1), %bnd0");
144d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rax,%rcx,8), %bnd0");
145d8b167f9SMatt Fleming 
146d8b167f9SMatt Fleming 	/* bndmov bnd, m128 */
147d8b167f9SMatt Fleming 
148d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%rax)");
149d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%r8)");
150d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (0x12345678)");
151d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd3, (%rax)");
152d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%rcx,%rax,1)");
153d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(,%rax,1)");
154d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%rax,%rcx,1)");
155d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%rax,%rcx,8)");
156d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rax)");
157d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rbp)");
158d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rcx,%rax,1)");
159d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rbp,%rax,1)");
160d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,1)");
161d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,8)");
162d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rax)");
163d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rbp)");
164d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rcx,%rax,1)");
165d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rbp,%rax,1)");
166d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,1)");
167d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,8)");
168d8b167f9SMatt Fleming 
169d8b167f9SMatt Fleming 	/* bndmov bnd2, bnd1 */
170d8b167f9SMatt Fleming 
171d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, %bnd1");
172d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd1, %bnd0");
173d8b167f9SMatt Fleming 
174d8b167f9SMatt Fleming 	/* bndldx mib, bnd */
175d8b167f9SMatt Fleming 
176d8b167f9SMatt Fleming 	asm volatile("bndldx (%rax), %bnd0");
177d8b167f9SMatt Fleming 	asm volatile("bndldx (%r8), %bnd0");
178d8b167f9SMatt Fleming 	asm volatile("bndldx (0x12345678), %bnd0");
179d8b167f9SMatt Fleming 	asm volatile("bndldx (%rax), %bnd3");
180d8b167f9SMatt Fleming 	asm volatile("bndldx (%rcx,%rax,1), %bnd0");
181d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(,%rax,1), %bnd0");
182d8b167f9SMatt Fleming 	asm volatile("bndldx (%rax,%rcx,1), %bnd0");
183d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%rax), %bnd0");
184d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%rbp), %bnd0");
185d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%rcx,%rax,1), %bnd0");
186d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%rbp,%rax,1), %bnd0");
187d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%rax,%rcx,1), %bnd0");
188d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%rax), %bnd0");
189d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%rbp), %bnd0");
190d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%rcx,%rax,1), %bnd0");
191d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%rbp,%rax,1), %bnd0");
192d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%rax,%rcx,1), %bnd0");
193d8b167f9SMatt Fleming 
194d8b167f9SMatt Fleming 	/* bndstx bnd, mib */
195d8b167f9SMatt Fleming 
196d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%rax)");
197d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%r8)");
198d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (0x12345678)");
199d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd3, (%rax)");
200d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%rcx,%rax,1)");
201d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(,%rax,1)");
202d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%rax,%rcx,1)");
203d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%rax)");
204d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%rbp)");
205d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%rcx,%rax,1)");
206d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%rbp,%rax,1)");
207d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%rax,%rcx,1)");
208d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%rax)");
209d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%rbp)");
210d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%rcx,%rax,1)");
211d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%rbp,%rax,1)");
212d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%rax,%rcx,1)");
213d8b167f9SMatt Fleming 
214d8b167f9SMatt Fleming 	/* bnd prefix on call, ret, jmp and all jcc */
215d8b167f9SMatt Fleming 
216d8b167f9SMatt Fleming 	asm volatile("bnd call label1");  /* Expecting: call unconditional 0 */
217d8b167f9SMatt Fleming 	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect      0 */
218d8b167f9SMatt Fleming 	asm volatile("bnd ret");          /* Expecting: ret  indirect      0 */
219d8b167f9SMatt Fleming 	asm volatile("bnd jmp label1");   /* Expecting: jmp  unconditional 0 */
220d8b167f9SMatt Fleming 	asm volatile("bnd jmp label1");   /* Expecting: jmp  unconditional 0 */
221d8b167f9SMatt Fleming 	asm volatile("bnd jmp *(%ecx)");  /* Expecting: jmp  indirect      0 */
222d8b167f9SMatt Fleming 	asm volatile("bnd jne label1");   /* Expecting: jcc  conditional   0 */
223d8b167f9SMatt Fleming 
224d8b167f9SMatt Fleming 	/* sha1rnds4 imm8, xmm2/m128, xmm1 */
225d8b167f9SMatt Fleming 
226d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
227d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
228d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, %xmm8, %xmm0");
229d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm8");
230d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, %xmm15, %xmm8");
231d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%rax), %xmm0");
232d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%r8), %xmm0");
233d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
234d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%rax), %xmm3");
235d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%rcx,%rax,1), %xmm0");
236d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(,%rax,1), %xmm0");
237d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%rax,%rcx,1), %xmm0");
238d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%rax,%rcx,8), %xmm0");
239d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rax), %xmm0");
240d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rbp), %xmm0");
241d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rcx,%rax,1), %xmm0");
242d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rbp,%rax,1), %xmm0");
243d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,1), %xmm0");
244d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,8), %xmm0");
245d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax), %xmm0");
246d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp), %xmm0");
247d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rcx,%rax,1), %xmm0");
248d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp,%rax,1), %xmm0");
249d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,1), %xmm0");
250d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm0");
251d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm15");
252d8b167f9SMatt Fleming 
253d8b167f9SMatt Fleming 	/* sha1nexte xmm2/m128, xmm1 */
254d8b167f9SMatt Fleming 
255d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm1, %xmm0");
256d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm7, %xmm2");
257d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm8, %xmm0");
258d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm7, %xmm8");
259d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm15, %xmm8");
260d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%rax), %xmm0");
261d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%r8), %xmm0");
262d8b167f9SMatt Fleming 	asm volatile("sha1nexte (0x12345678), %xmm0");
263d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%rax), %xmm3");
264d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%rcx,%rax,1), %xmm0");
265d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(,%rax,1), %xmm0");
266d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%rax,%rcx,1), %xmm0");
267d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%rax,%rcx,8), %xmm0");
268d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rax), %xmm0");
269d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rbp), %xmm0");
270d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rcx,%rax,1), %xmm0");
271d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rbp,%rax,1), %xmm0");
272d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rax,%rcx,1), %xmm0");
273d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rax,%rcx,8), %xmm0");
274d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rax), %xmm0");
275d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rbp), %xmm0");
276d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rcx,%rax,1), %xmm0");
277d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rbp,%rax,1), %xmm0");
278d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rax,%rcx,1), %xmm0");
279d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm0");
280d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm15");
281d8b167f9SMatt Fleming 
282d8b167f9SMatt Fleming 	/* sha1msg1 xmm2/m128, xmm1 */
283d8b167f9SMatt Fleming 
284d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm1, %xmm0");
285d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm7, %xmm2");
286d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm8, %xmm0");
287d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm7, %xmm8");
288d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm15, %xmm8");
289d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%rax), %xmm0");
290d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%r8), %xmm0");
291d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (0x12345678), %xmm0");
292d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%rax), %xmm3");
293d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%rcx,%rax,1), %xmm0");
294d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(,%rax,1), %xmm0");
295d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%rax,%rcx,1), %xmm0");
296d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%rax,%rcx,8), %xmm0");
297d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rax), %xmm0");
298d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rbp), %xmm0");
299d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rcx,%rax,1), %xmm0");
300d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rbp,%rax,1), %xmm0");
301d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rax,%rcx,1), %xmm0");
302d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rax,%rcx,8), %xmm0");
303d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rax), %xmm0");
304d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rbp), %xmm0");
305d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rcx,%rax,1), %xmm0");
306d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rbp,%rax,1), %xmm0");
307d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rax,%rcx,1), %xmm0");
308d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm0");
309d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm15");
310d8b167f9SMatt Fleming 
311d8b167f9SMatt Fleming 	/* sha1msg2 xmm2/m128, xmm1 */
312d8b167f9SMatt Fleming 
313d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm1, %xmm0");
314d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm7, %xmm2");
315d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm8, %xmm0");
316d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm7, %xmm8");
317d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm15, %xmm8");
318d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%rax), %xmm0");
319d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%r8), %xmm0");
320d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (0x12345678), %xmm0");
321d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%rax), %xmm3");
322d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%rcx,%rax,1), %xmm0");
323d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(,%rax,1), %xmm0");
324d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%rax,%rcx,1), %xmm0");
325d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%rax,%rcx,8), %xmm0");
326d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rax), %xmm0");
327d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rbp), %xmm0");
328d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rcx,%rax,1), %xmm0");
329d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rbp,%rax,1), %xmm0");
330d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rax,%rcx,1), %xmm0");
331d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rax,%rcx,8), %xmm0");
332d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rax), %xmm0");
333d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rbp), %xmm0");
334d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rcx,%rax,1), %xmm0");
335d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rbp,%rax,1), %xmm0");
336d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rax,%rcx,1), %xmm0");
337d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm0");
338d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm15");
339d8b167f9SMatt Fleming 
340d8b167f9SMatt Fleming 	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
341d8b167f9SMatt Fleming 	/* Note sha256rnds2 has an implicit operand 'xmm0' */
342d8b167f9SMatt Fleming 
343d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm4, %xmm1");
344d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm7, %xmm2");
345d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm8, %xmm1");
346d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm7, %xmm8");
347d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm15, %xmm8");
348d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%rax), %xmm1");
349d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%r8), %xmm1");
350d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (0x12345678), %xmm1");
351d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%rax), %xmm3");
352d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%rcx,%rax,1), %xmm1");
353d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(,%rax,1), %xmm1");
354d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%rax,%rcx,1), %xmm1");
355d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%rax,%rcx,8), %xmm1");
356d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rax), %xmm1");
357d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rbp), %xmm1");
358d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rcx,%rax,1), %xmm1");
359d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rbp,%rax,1), %xmm1");
360d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rax,%rcx,1), %xmm1");
361d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rax,%rcx,8), %xmm1");
362d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rax), %xmm1");
363d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rbp), %xmm1");
364d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rcx,%rax,1), %xmm1");
365d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rbp,%rax,1), %xmm1");
366d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,1), %xmm1");
367d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm1");
368d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm15");
369d8b167f9SMatt Fleming 
370d8b167f9SMatt Fleming 	/* sha256msg1 xmm2/m128, xmm1 */
371d8b167f9SMatt Fleming 
372d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm1, %xmm0");
373d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm7, %xmm2");
374d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm8, %xmm0");
375d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm7, %xmm8");
376d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm15, %xmm8");
377d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%rax), %xmm0");
378d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%r8), %xmm0");
379d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (0x12345678), %xmm0");
380d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%rax), %xmm3");
381d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%rcx,%rax,1), %xmm0");
382d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(,%rax,1), %xmm0");
383d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%rax,%rcx,1), %xmm0");
384d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%rax,%rcx,8), %xmm0");
385d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rax), %xmm0");
386d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rbp), %xmm0");
387d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rcx,%rax,1), %xmm0");
388d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rbp,%rax,1), %xmm0");
389d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rax,%rcx,1), %xmm0");
390d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rax,%rcx,8), %xmm0");
391d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rax), %xmm0");
392d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rbp), %xmm0");
393d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rcx,%rax,1), %xmm0");
394d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rbp,%rax,1), %xmm0");
395d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rax,%rcx,1), %xmm0");
396d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm0");
397d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm15");
398d8b167f9SMatt Fleming 
399d8b167f9SMatt Fleming 	/* sha256msg2 xmm2/m128, xmm1 */
400d8b167f9SMatt Fleming 
401d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm1, %xmm0");
402d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm7, %xmm2");
403d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm8, %xmm0");
404d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm7, %xmm8");
405d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm15, %xmm8");
406d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%rax), %xmm0");
407d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%r8), %xmm0");
408d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (0x12345678), %xmm0");
409d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%rax), %xmm3");
410d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%rcx,%rax,1), %xmm0");
411d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(,%rax,1), %xmm0");
412d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%rax,%rcx,1), %xmm0");
413d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%rax,%rcx,8), %xmm0");
414d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rax), %xmm0");
415d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rbp), %xmm0");
416d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rcx,%rax,1), %xmm0");
417d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rbp,%rax,1), %xmm0");
418d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rax,%rcx,1), %xmm0");
419d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rax,%rcx,8), %xmm0");
420d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rax), %xmm0");
421d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rbp), %xmm0");
422d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rcx,%rax,1), %xmm0");
423d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rbp,%rax,1), %xmm0");
424d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rax,%rcx,1), %xmm0");
425d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm0");
426d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm15");
427d8b167f9SMatt Fleming 
428d8b167f9SMatt Fleming 	/* clflushopt m8 */
429d8b167f9SMatt Fleming 
430d8b167f9SMatt Fleming 	asm volatile("clflushopt (%rax)");
431d8b167f9SMatt Fleming 	asm volatile("clflushopt (%r8)");
432d8b167f9SMatt Fleming 	asm volatile("clflushopt (0x12345678)");
433d8b167f9SMatt Fleming 	asm volatile("clflushopt 0x12345678(%rax,%rcx,8)");
434d8b167f9SMatt Fleming 	asm volatile("clflushopt 0x12345678(%r8,%rcx,8)");
435d8b167f9SMatt Fleming 	/* Also check instructions in the same group encoding as clflushopt */
436d8b167f9SMatt Fleming 	asm volatile("clflush (%rax)");
437d8b167f9SMatt Fleming 	asm volatile("clflush (%r8)");
438d8b167f9SMatt Fleming 	asm volatile("sfence");
439d8b167f9SMatt Fleming 
440d8b167f9SMatt Fleming 	/* clwb m8 */
441d8b167f9SMatt Fleming 
442d8b167f9SMatt Fleming 	asm volatile("clwb (%rax)");
443d8b167f9SMatt Fleming 	asm volatile("clwb (%r8)");
444d8b167f9SMatt Fleming 	asm volatile("clwb (0x12345678)");
445d8b167f9SMatt Fleming 	asm volatile("clwb 0x12345678(%rax,%rcx,8)");
446d8b167f9SMatt Fleming 	asm volatile("clwb 0x12345678(%r8,%rcx,8)");
447d8b167f9SMatt Fleming 	/* Also check instructions in the same group encoding as clwb */
448d8b167f9SMatt Fleming 	asm volatile("xsaveopt (%rax)");
449d8b167f9SMatt Fleming 	asm volatile("xsaveopt (%r8)");
450d8b167f9SMatt Fleming 	asm volatile("mfence");
451d8b167f9SMatt Fleming 
452d8b167f9SMatt Fleming 	/* xsavec mem */
453d8b167f9SMatt Fleming 
454d8b167f9SMatt Fleming 	asm volatile("xsavec (%rax)");
455d8b167f9SMatt Fleming 	asm volatile("xsavec (%r8)");
456d8b167f9SMatt Fleming 	asm volatile("xsavec (0x12345678)");
457d8b167f9SMatt Fleming 	asm volatile("xsavec 0x12345678(%rax,%rcx,8)");
458d8b167f9SMatt Fleming 	asm volatile("xsavec 0x12345678(%r8,%rcx,8)");
459d8b167f9SMatt Fleming 
460d8b167f9SMatt Fleming 	/* xsaves mem */
461d8b167f9SMatt Fleming 
462d8b167f9SMatt Fleming 	asm volatile("xsaves (%rax)");
463d8b167f9SMatt Fleming 	asm volatile("xsaves (%r8)");
464d8b167f9SMatt Fleming 	asm volatile("xsaves (0x12345678)");
465d8b167f9SMatt Fleming 	asm volatile("xsaves 0x12345678(%rax,%rcx,8)");
466d8b167f9SMatt Fleming 	asm volatile("xsaves 0x12345678(%r8,%rcx,8)");
467d8b167f9SMatt Fleming 
468d8b167f9SMatt Fleming 	/* xrstors mem */
469d8b167f9SMatt Fleming 
470d8b167f9SMatt Fleming 	asm volatile("xrstors (%rax)");
471d8b167f9SMatt Fleming 	asm volatile("xrstors (%r8)");
472d8b167f9SMatt Fleming 	asm volatile("xrstors (0x12345678)");
473d8b167f9SMatt Fleming 	asm volatile("xrstors 0x12345678(%rax,%rcx,8)");
474d8b167f9SMatt Fleming 	asm volatile("xrstors 0x12345678(%r8,%rcx,8)");
475d8b167f9SMatt Fleming 
476d8b167f9SMatt Fleming #else  /* #ifdef __x86_64__ */
477d8b167f9SMatt Fleming 
478d8b167f9SMatt Fleming 	/* bndmk m32, bnd */
479d8b167f9SMatt Fleming 
480d8b167f9SMatt Fleming 	asm volatile("bndmk (%eax), %bnd0");
481d8b167f9SMatt Fleming 	asm volatile("bndmk (0x12345678), %bnd0");
482d8b167f9SMatt Fleming 	asm volatile("bndmk (%eax), %bnd3");
483d8b167f9SMatt Fleming 	asm volatile("bndmk (%ecx,%eax,1), %bnd0");
484d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(,%eax,1), %bnd0");
485d8b167f9SMatt Fleming 	asm volatile("bndmk (%eax,%ecx,1), %bnd0");
486d8b167f9SMatt Fleming 	asm volatile("bndmk (%eax,%ecx,8), %bnd0");
487d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%eax), %bnd0");
488d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%ebp), %bnd0");
489d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%ecx,%eax,1), %bnd0");
490d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%ebp,%eax,1), %bnd0");
491d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%eax,%ecx,1), %bnd0");
492d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%eax,%ecx,8), %bnd0");
493d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%eax), %bnd0");
494d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%ebp), %bnd0");
495d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%ecx,%eax,1), %bnd0");
496d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%ebp,%eax,1), %bnd0");
497d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%eax,%ecx,1), %bnd0");
498d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%eax,%ecx,8), %bnd0");
499d8b167f9SMatt Fleming 
500d8b167f9SMatt Fleming 	/* bndcl r/m32, bnd */
501d8b167f9SMatt Fleming 
502d8b167f9SMatt Fleming 	asm volatile("bndcl (%eax), %bnd0");
503d8b167f9SMatt Fleming 	asm volatile("bndcl (0x12345678), %bnd0");
504d8b167f9SMatt Fleming 	asm volatile("bndcl (%eax), %bnd3");
505d8b167f9SMatt Fleming 	asm volatile("bndcl (%ecx,%eax,1), %bnd0");
506d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(,%eax,1), %bnd0");
507d8b167f9SMatt Fleming 	asm volatile("bndcl (%eax,%ecx,1), %bnd0");
508d8b167f9SMatt Fleming 	asm volatile("bndcl (%eax,%ecx,8), %bnd0");
509d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%eax), %bnd0");
510d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%ebp), %bnd0");
511d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%ecx,%eax,1), %bnd0");
512d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%ebp,%eax,1), %bnd0");
513d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%eax,%ecx,1), %bnd0");
514d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%eax,%ecx,8), %bnd0");
515d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%eax), %bnd0");
516d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%ebp), %bnd0");
517d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%ecx,%eax,1), %bnd0");
518d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%ebp,%eax,1), %bnd0");
519d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%eax,%ecx,1), %bnd0");
520d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%eax,%ecx,8), %bnd0");
521d8b167f9SMatt Fleming 	asm volatile("bndcl %eax, %bnd0");
522d8b167f9SMatt Fleming 
523d8b167f9SMatt Fleming 	/* bndcu r/m32, bnd */
524d8b167f9SMatt Fleming 
525d8b167f9SMatt Fleming 	asm volatile("bndcu (%eax), %bnd0");
526d8b167f9SMatt Fleming 	asm volatile("bndcu (0x12345678), %bnd0");
527d8b167f9SMatt Fleming 	asm volatile("bndcu (%eax), %bnd3");
528d8b167f9SMatt Fleming 	asm volatile("bndcu (%ecx,%eax,1), %bnd0");
529d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(,%eax,1), %bnd0");
530d8b167f9SMatt Fleming 	asm volatile("bndcu (%eax,%ecx,1), %bnd0");
531d8b167f9SMatt Fleming 	asm volatile("bndcu (%eax,%ecx,8), %bnd0");
532d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%eax), %bnd0");
533d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%ebp), %bnd0");
534d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%ecx,%eax,1), %bnd0");
535d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%ebp,%eax,1), %bnd0");
536d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%eax,%ecx,1), %bnd0");
537d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%eax,%ecx,8), %bnd0");
538d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%eax), %bnd0");
539d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%ebp), %bnd0");
540d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%ecx,%eax,1), %bnd0");
541d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%ebp,%eax,1), %bnd0");
542d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%eax,%ecx,1), %bnd0");
543d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%eax,%ecx,8), %bnd0");
544d8b167f9SMatt Fleming 	asm volatile("bndcu %eax, %bnd0");
545d8b167f9SMatt Fleming 
546d8b167f9SMatt Fleming 	/* bndcn r/m32, bnd */
547d8b167f9SMatt Fleming 
548d8b167f9SMatt Fleming 	asm volatile("bndcn (%eax), %bnd0");
549d8b167f9SMatt Fleming 	asm volatile("bndcn (0x12345678), %bnd0");
550d8b167f9SMatt Fleming 	asm volatile("bndcn (%eax), %bnd3");
551d8b167f9SMatt Fleming 	asm volatile("bndcn (%ecx,%eax,1), %bnd0");
552d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(,%eax,1), %bnd0");
553d8b167f9SMatt Fleming 	asm volatile("bndcn (%eax,%ecx,1), %bnd0");
554d8b167f9SMatt Fleming 	asm volatile("bndcn (%eax,%ecx,8), %bnd0");
555d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%eax), %bnd0");
556d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%ebp), %bnd0");
557d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%ecx,%eax,1), %bnd0");
558d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%ebp,%eax,1), %bnd0");
559d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%eax,%ecx,1), %bnd0");
560d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%eax,%ecx,8), %bnd0");
561d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%eax), %bnd0");
562d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%ebp), %bnd0");
563d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%ecx,%eax,1), %bnd0");
564d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%ebp,%eax,1), %bnd0");
565d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%eax,%ecx,1), %bnd0");
566d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%eax,%ecx,8), %bnd0");
567d8b167f9SMatt Fleming 	asm volatile("bndcn %eax, %bnd0");
568d8b167f9SMatt Fleming 
569d8b167f9SMatt Fleming 	/* bndmov m64, bnd */
570d8b167f9SMatt Fleming 
571d8b167f9SMatt Fleming 	asm volatile("bndmov (%eax), %bnd0");
572d8b167f9SMatt Fleming 	asm volatile("bndmov (0x12345678), %bnd0");
573d8b167f9SMatt Fleming 	asm volatile("bndmov (%eax), %bnd3");
574d8b167f9SMatt Fleming 	asm volatile("bndmov (%ecx,%eax,1), %bnd0");
575d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(,%eax,1), %bnd0");
576d8b167f9SMatt Fleming 	asm volatile("bndmov (%eax,%ecx,1), %bnd0");
577d8b167f9SMatt Fleming 	asm volatile("bndmov (%eax,%ecx,8), %bnd0");
578d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%eax), %bnd0");
579d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%ebp), %bnd0");
580d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%ecx,%eax,1), %bnd0");
581d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%ebp,%eax,1), %bnd0");
582d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%eax,%ecx,1), %bnd0");
583d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%eax,%ecx,8), %bnd0");
584d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%eax), %bnd0");
585d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%ebp), %bnd0");
586d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%ecx,%eax,1), %bnd0");
587d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%ebp,%eax,1), %bnd0");
588d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%eax,%ecx,1), %bnd0");
589d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%eax,%ecx,8), %bnd0");
590d8b167f9SMatt Fleming 
591d8b167f9SMatt Fleming 	/* bndmov bnd, m64 */
592d8b167f9SMatt Fleming 
593d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%eax)");
594d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (0x12345678)");
595d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd3, (%eax)");
596d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%ecx,%eax,1)");
597d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(,%eax,1)");
598d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%eax,%ecx,1)");
599d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%eax,%ecx,8)");
600d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%eax)");
601d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%ebp)");
602d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%ecx,%eax,1)");
603d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%ebp,%eax,1)");
604d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,1)");
605d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,8)");
606d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%eax)");
607d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%ebp)");
608d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%ecx,%eax,1)");
609d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%ebp,%eax,1)");
610d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,1)");
611d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,8)");
612d8b167f9SMatt Fleming 
613d8b167f9SMatt Fleming 	/* bndmov bnd2, bnd1 */
614d8b167f9SMatt Fleming 
615d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, %bnd1");
616d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd1, %bnd0");
617d8b167f9SMatt Fleming 
618d8b167f9SMatt Fleming 	/* bndldx mib, bnd */
619d8b167f9SMatt Fleming 
620d8b167f9SMatt Fleming 	asm volatile("bndldx (%eax), %bnd0");
621d8b167f9SMatt Fleming 	asm volatile("bndldx (0x12345678), %bnd0");
622d8b167f9SMatt Fleming 	asm volatile("bndldx (%eax), %bnd3");
623d8b167f9SMatt Fleming 	asm volatile("bndldx (%ecx,%eax,1), %bnd0");
624d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(,%eax,1), %bnd0");
625d8b167f9SMatt Fleming 	asm volatile("bndldx (%eax,%ecx,1), %bnd0");
626d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%eax), %bnd0");
627d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%ebp), %bnd0");
628d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%ecx,%eax,1), %bnd0");
629d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%ebp,%eax,1), %bnd0");
630d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%eax,%ecx,1), %bnd0");
631d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%eax), %bnd0");
632d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%ebp), %bnd0");
633d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%ecx,%eax,1), %bnd0");
634d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%ebp,%eax,1), %bnd0");
635d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%eax,%ecx,1), %bnd0");
636d8b167f9SMatt Fleming 
637d8b167f9SMatt Fleming 	/* bndstx bnd, mib */
638d8b167f9SMatt Fleming 
639d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%eax)");
640d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (0x12345678)");
641d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd3, (%eax)");
642d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%ecx,%eax,1)");
643d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(,%eax,1)");
644d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%eax,%ecx,1)");
645d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%eax)");
646d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%ebp)");
647d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%ecx,%eax,1)");
648d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%ebp,%eax,1)");
649d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%eax,%ecx,1)");
650d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%eax)");
651d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%ebp)");
652d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%ecx,%eax,1)");
653d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%ebp,%eax,1)");
654d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%eax,%ecx,1)");
655d8b167f9SMatt Fleming 
656d8b167f9SMatt Fleming 	/* bnd prefix on call, ret, jmp and jcc (jne as the representative jcc) */
657d8b167f9SMatt Fleming 
658d8b167f9SMatt Fleming 	asm volatile("bnd call label1");  /* Expecting: call unconditional 0xfffffffc */
659d8b167f9SMatt Fleming 	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect      0 */
660d8b167f9SMatt Fleming 	asm volatile("bnd ret");          /* Expecting: ret  indirect      0 */
661d8b167f9SMatt Fleming 	asm volatile("bnd jmp label1");   /* Expecting: jmp  unconditional 0xfffffffc */
662d8b167f9SMatt Fleming 	asm volatile("bnd jmp label1");   /* Expecting: jmp  unconditional 0xfffffffc */
663d8b167f9SMatt Fleming 	asm volatile("bnd jmp *(%ecx)");  /* Expecting: jmp  indirect      0 */
664d8b167f9SMatt Fleming 	asm volatile("bnd jne label1");   /* Expecting: jcc  conditional   0xfffffffc */
665d8b167f9SMatt Fleming 
666d8b167f9SMatt Fleming 	/* sha1rnds4 imm8, xmm2/m128, xmm1 */
667d8b167f9SMatt Fleming 
668d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
669d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
670d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%eax), %xmm0");
671d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
672d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%eax), %xmm3");
673d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%ecx,%eax,1), %xmm0");
674d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(,%eax,1), %xmm0");
675d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,1), %xmm0");
676d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,8), %xmm0");
677d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%eax), %xmm0");
678d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%ebp), %xmm0");
679d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%ecx,%eax,1), %xmm0");
680d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%ebp,%eax,1), %xmm0");
681d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,1), %xmm0");
682d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,8), %xmm0");
683d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax), %xmm0");
684d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp), %xmm0");
685d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%ecx,%eax,1), %xmm0");
686d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp,%eax,1), %xmm0");
687d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,1), %xmm0");
688d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,8), %xmm0");
689d8b167f9SMatt Fleming 
690d8b167f9SMatt Fleming 	/* sha1nexte xmm2/m128, xmm1 */
691d8b167f9SMatt Fleming 
692d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm1, %xmm0");
693d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm7, %xmm2");
694d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%eax), %xmm0");
695d8b167f9SMatt Fleming 	asm volatile("sha1nexte (0x12345678), %xmm0");
696d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%eax), %xmm3");
697d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%ecx,%eax,1), %xmm0");
698d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(,%eax,1), %xmm0");
699d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%eax,%ecx,1), %xmm0");
700d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%eax,%ecx,8), %xmm0");
701d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%eax), %xmm0");
702d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%ebp), %xmm0");
703d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%ecx,%eax,1), %xmm0");
704d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%ebp,%eax,1), %xmm0");
705d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%eax,%ecx,1), %xmm0");
706d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%eax,%ecx,8), %xmm0");
707d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%eax), %xmm0");
708d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%ebp), %xmm0");
709d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%ecx,%eax,1), %xmm0");
710d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%ebp,%eax,1), %xmm0");
711d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%eax,%ecx,1), %xmm0");
712d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%eax,%ecx,8), %xmm0");
713d8b167f9SMatt Fleming 
714d8b167f9SMatt Fleming 	/* sha1msg1 xmm2/m128, xmm1 */
715d8b167f9SMatt Fleming 
716d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm1, %xmm0");
717d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm7, %xmm2");
718d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%eax), %xmm0");
719d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (0x12345678), %xmm0");
720d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%eax), %xmm3");
721d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%ecx,%eax,1), %xmm0");
722d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(,%eax,1), %xmm0");
723d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%eax,%ecx,1), %xmm0");
724d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%eax,%ecx,8), %xmm0");
725d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%eax), %xmm0");
726d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%ebp), %xmm0");
727d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%ecx,%eax,1), %xmm0");
728d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%ebp,%eax,1), %xmm0");
729d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%eax,%ecx,1), %xmm0");
730d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%eax,%ecx,8), %xmm0");
731d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%eax), %xmm0");
732d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%ebp), %xmm0");
733d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%ecx,%eax,1), %xmm0");
734d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%ebp,%eax,1), %xmm0");
735d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%eax,%ecx,1), %xmm0");
736d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%eax,%ecx,8), %xmm0");
737d8b167f9SMatt Fleming 
738d8b167f9SMatt Fleming 	/* sha1msg2 xmm2/m128, xmm1 */
739d8b167f9SMatt Fleming 
740d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm1, %xmm0");
741d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm7, %xmm2");
742d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%eax), %xmm0");
743d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (0x12345678), %xmm0");
744d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%eax), %xmm3");
745d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%ecx,%eax,1), %xmm0");
746d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(,%eax,1), %xmm0");
747d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%eax,%ecx,1), %xmm0");
748d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%eax,%ecx,8), %xmm0");
749d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%eax), %xmm0");
750d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%ebp), %xmm0");
751d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%ecx,%eax,1), %xmm0");
752d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%ebp,%eax,1), %xmm0");
753d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%eax,%ecx,1), %xmm0");
754d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%eax,%ecx,8), %xmm0");
755d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%eax), %xmm0");
756d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%ebp), %xmm0");
757d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%ecx,%eax,1), %xmm0");
758d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%ebp,%eax,1), %xmm0");
759d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%eax,%ecx,1), %xmm0");
760d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%eax,%ecx,8), %xmm0");
761d8b167f9SMatt Fleming 
762d8b167f9SMatt Fleming 	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
763d8b167f9SMatt Fleming 	/* Note sha256rnds2 has an implicit operand 'xmm0' */
764d8b167f9SMatt Fleming 
765d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm4, %xmm1");
766d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm7, %xmm2");
767d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%eax), %xmm1");
768d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (0x12345678), %xmm1");
769d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%eax), %xmm3");
770d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%ecx,%eax,1), %xmm1");
771d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(,%eax,1), %xmm1");
772d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%eax,%ecx,1), %xmm1");
773d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%eax,%ecx,8), %xmm1");
774d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%eax), %xmm1");
775d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%ebp), %xmm1");
776d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%ecx,%eax,1), %xmm1");
777d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%ebp,%eax,1), %xmm1");
778d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%eax,%ecx,1), %xmm1");
779d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%eax,%ecx,8), %xmm1");
780d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%eax), %xmm1");
781d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%ebp), %xmm1");
782d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%ecx,%eax,1), %xmm1");
783d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%ebp,%eax,1), %xmm1");
784d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,1), %xmm1");
785d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,8), %xmm1");
786d8b167f9SMatt Fleming 
787d8b167f9SMatt Fleming 	/* sha256msg1 xmm2/m128, xmm1 */
788d8b167f9SMatt Fleming 
789d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm1, %xmm0");
790d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm7, %xmm2");
791d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%eax), %xmm0");
792d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (0x12345678), %xmm0");
793d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%eax), %xmm3");
794d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%ecx,%eax,1), %xmm0");
795d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(,%eax,1), %xmm0");
796d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%eax,%ecx,1), %xmm0");
797d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%eax,%ecx,8), %xmm0");
798d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%eax), %xmm0");
799d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%ebp), %xmm0");
800d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%ecx,%eax,1), %xmm0");
801d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%ebp,%eax,1), %xmm0");
802d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%eax,%ecx,1), %xmm0");
803d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%eax,%ecx,8), %xmm0");
804d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%eax), %xmm0");
805d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%ebp), %xmm0");
806d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%ecx,%eax,1), %xmm0");
807d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%ebp,%eax,1), %xmm0");
808d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%eax,%ecx,1), %xmm0");
809d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%eax,%ecx,8), %xmm0");
810d8b167f9SMatt Fleming 
811d8b167f9SMatt Fleming 	/* sha256msg2 xmm2/m128, xmm1 */
812d8b167f9SMatt Fleming 
813d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm1, %xmm0");
814d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm7, %xmm2");
815d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%eax), %xmm0");
816d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (0x12345678), %xmm0");
817d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%eax), %xmm3");
818d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%ecx,%eax,1), %xmm0");
819d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(,%eax,1), %xmm0");
820d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%eax,%ecx,1), %xmm0");
821d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%eax,%ecx,8), %xmm0");
822d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%eax), %xmm0");
823d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%ebp), %xmm0");
824d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%ecx,%eax,1), %xmm0");
825d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%ebp,%eax,1), %xmm0");
826d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%eax,%ecx,1), %xmm0");
827d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%eax,%ecx,8), %xmm0");
828d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%eax), %xmm0");
829d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%ebp), %xmm0");
830d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%ecx,%eax,1), %xmm0");
831d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%ebp,%eax,1), %xmm0");
832d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%eax,%ecx,1), %xmm0");
833d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%eax,%ecx,8), %xmm0");
834d8b167f9SMatt Fleming 
835d8b167f9SMatt Fleming 	/* clflushopt m8 */
836d8b167f9SMatt Fleming 
837d8b167f9SMatt Fleming 	asm volatile("clflushopt (%eax)");
838d8b167f9SMatt Fleming 	asm volatile("clflushopt (0x12345678)");
839d8b167f9SMatt Fleming 	asm volatile("clflushopt 0x12345678(%eax,%ecx,8)");
840d8b167f9SMatt Fleming 	/* Also check instructions in the same group encoding as clflushopt */
841d8b167f9SMatt Fleming 	asm volatile("clflush (%eax)");
842d8b167f9SMatt Fleming 	asm volatile("sfence");
843d8b167f9SMatt Fleming 
844d8b167f9SMatt Fleming 	/* clwb m8 */
845d8b167f9SMatt Fleming 
846d8b167f9SMatt Fleming 	asm volatile("clwb (%eax)");
847d8b167f9SMatt Fleming 	asm volatile("clwb (0x12345678)");
848d8b167f9SMatt Fleming 	asm volatile("clwb 0x12345678(%eax,%ecx,8)");
849d8b167f9SMatt Fleming 	/* Also check instructions in the same group encoding as clwb */
850d8b167f9SMatt Fleming 	asm volatile("xsaveopt (%eax)");
851d8b167f9SMatt Fleming 	asm volatile("mfence");
852d8b167f9SMatt Fleming 
853d8b167f9SMatt Fleming 	/* xsavec mem */
854d8b167f9SMatt Fleming 
855d8b167f9SMatt Fleming 	asm volatile("xsavec (%eax)");
856d8b167f9SMatt Fleming 	asm volatile("xsavec (0x12345678)");
857d8b167f9SMatt Fleming 	asm volatile("xsavec 0x12345678(%eax,%ecx,8)");
858d8b167f9SMatt Fleming 
859d8b167f9SMatt Fleming 	/* xsaves mem */
860d8b167f9SMatt Fleming 
861d8b167f9SMatt Fleming 	asm volatile("xsaves (%eax)");
862d8b167f9SMatt Fleming 	asm volatile("xsaves (0x12345678)");
863d8b167f9SMatt Fleming 	asm volatile("xsaves 0x12345678(%eax,%ecx,8)");
864d8b167f9SMatt Fleming 
865d8b167f9SMatt Fleming 	/* xrstors mem */
866d8b167f9SMatt Fleming 
867d8b167f9SMatt Fleming 	asm volatile("xrstors (%eax)");
868d8b167f9SMatt Fleming 	asm volatile("xrstors (0x12345678)");
869d8b167f9SMatt Fleming 	asm volatile("xrstors 0x12345678(%eax,%ecx,8)");
870d8b167f9SMatt Fleming 
871d8b167f9SMatt Fleming #endif /* #ifdef __x86_64__ */
872d8b167f9SMatt Fleming 
873d8b167f9SMatt Fleming 	/* pcommit */
874d8b167f9SMatt Fleming 
875d8b167f9SMatt Fleming 	asm volatile("pcommit");
876d8b167f9SMatt Fleming 
877d8b167f9SMatt Fleming 	/* Following line is a marker for the awk script - do not change */
878d8b167f9SMatt Fleming 	asm volatile("rdtsc"); /* Stop here */
879d8b167f9SMatt Fleming 
880d8b167f9SMatt Fleming 	return 0;
881d8b167f9SMatt Fleming }
882