/*
 * This file contains instructions for testing by the test titled:
 *
 *         "Test x86 instruction decoder - new instructions"
 *
 * Note that the 'Expecting' comment lines are consumed by the
 * gen-insn-x86-dat.awk script and have the format:
 *
 *         Expecting: <op> <branch> <rel>
 *
 * If this file is changed, remember to run the gen-insn-x86-dat.sh
 * script and commit the result.
 *
 * Refer to insn-x86.c for more details.
 */
16d8b167f9SMatt Fleming 
17d8b167f9SMatt Fleming int main(void)
18d8b167f9SMatt Fleming {
19d8b167f9SMatt Fleming 	/* Following line is a marker for the awk script - do not change */
20d8b167f9SMatt Fleming 	asm volatile("rdtsc"); /* Start here */
21d8b167f9SMatt Fleming 
22d8b167f9SMatt Fleming #ifdef __x86_64__
23d8b167f9SMatt Fleming 
24d8b167f9SMatt Fleming 	/* bndmk m64, bnd */
25d8b167f9SMatt Fleming 
26d8b167f9SMatt Fleming 	asm volatile("bndmk (%rax), %bnd0");
27d8b167f9SMatt Fleming 	asm volatile("bndmk (%r8), %bnd0");
28d8b167f9SMatt Fleming 	asm volatile("bndmk (0x12345678), %bnd0");
29d8b167f9SMatt Fleming 	asm volatile("bndmk (%rax), %bnd3");
30d8b167f9SMatt Fleming 	asm volatile("bndmk (%rcx,%rax,1), %bnd0");
31d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(,%rax,1), %bnd0");
32d8b167f9SMatt Fleming 	asm volatile("bndmk (%rax,%rcx,1), %bnd0");
33d8b167f9SMatt Fleming 	asm volatile("bndmk (%rax,%rcx,8), %bnd0");
34d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rax), %bnd0");
35d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rbp), %bnd0");
36d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rcx,%rax,1), %bnd0");
37d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rbp,%rax,1), %bnd0");
38d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rax,%rcx,1), %bnd0");
39d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%rax,%rcx,8), %bnd0");
40d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rax), %bnd0");
41d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rbp), %bnd0");
42d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rcx,%rax,1), %bnd0");
43d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rbp,%rax,1), %bnd0");
44d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rax,%rcx,1), %bnd0");
45d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%rax,%rcx,8), %bnd0");
46d8b167f9SMatt Fleming 
47d8b167f9SMatt Fleming 	/* bndcl r/m64, bnd */
48d8b167f9SMatt Fleming 
49d8b167f9SMatt Fleming 	asm volatile("bndcl (%rax), %bnd0");
50d8b167f9SMatt Fleming 	asm volatile("bndcl (%r8), %bnd0");
51d8b167f9SMatt Fleming 	asm volatile("bndcl (0x12345678), %bnd0");
52d8b167f9SMatt Fleming 	asm volatile("bndcl (%rax), %bnd3");
53d8b167f9SMatt Fleming 	asm volatile("bndcl (%rcx,%rax,1), %bnd0");
54d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(,%rax,1), %bnd0");
55d8b167f9SMatt Fleming 	asm volatile("bndcl (%rax,%rcx,1), %bnd0");
56d8b167f9SMatt Fleming 	asm volatile("bndcl (%rax,%rcx,8), %bnd0");
57d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rax), %bnd0");
58d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rbp), %bnd0");
59d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rcx,%rax,1), %bnd0");
60d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rbp,%rax,1), %bnd0");
61d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rax,%rcx,1), %bnd0");
62d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%rax,%rcx,8), %bnd0");
63d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rax), %bnd0");
64d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rbp), %bnd0");
65d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rcx,%rax,1), %bnd0");
66d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rbp,%rax,1), %bnd0");
67d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rax,%rcx,1), %bnd0");
68d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%rax,%rcx,8), %bnd0");
69d8b167f9SMatt Fleming 	asm volatile("bndcl %rax, %bnd0");
70d8b167f9SMatt Fleming 
71d8b167f9SMatt Fleming 	/* bndcu r/m64, bnd */
72d8b167f9SMatt Fleming 
73d8b167f9SMatt Fleming 	asm volatile("bndcu (%rax), %bnd0");
74d8b167f9SMatt Fleming 	asm volatile("bndcu (%r8), %bnd0");
75d8b167f9SMatt Fleming 	asm volatile("bndcu (0x12345678), %bnd0");
76d8b167f9SMatt Fleming 	asm volatile("bndcu (%rax), %bnd3");
77d8b167f9SMatt Fleming 	asm volatile("bndcu (%rcx,%rax,1), %bnd0");
78d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(,%rax,1), %bnd0");
79d8b167f9SMatt Fleming 	asm volatile("bndcu (%rax,%rcx,1), %bnd0");
80d8b167f9SMatt Fleming 	asm volatile("bndcu (%rax,%rcx,8), %bnd0");
81d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rax), %bnd0");
82d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rbp), %bnd0");
83d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rcx,%rax,1), %bnd0");
84d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rbp,%rax,1), %bnd0");
85d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rax,%rcx,1), %bnd0");
86d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%rax,%rcx,8), %bnd0");
87d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rax), %bnd0");
88d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rbp), %bnd0");
89d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rcx,%rax,1), %bnd0");
90d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rbp,%rax,1), %bnd0");
91d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rax,%rcx,1), %bnd0");
92d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%rax,%rcx,8), %bnd0");
93d8b167f9SMatt Fleming 	asm volatile("bndcu %rax, %bnd0");
94d8b167f9SMatt Fleming 
95d8b167f9SMatt Fleming 	/* bndcn r/m64, bnd */
96d8b167f9SMatt Fleming 
97d8b167f9SMatt Fleming 	asm volatile("bndcn (%rax), %bnd0");
98d8b167f9SMatt Fleming 	asm volatile("bndcn (%r8), %bnd0");
99d8b167f9SMatt Fleming 	asm volatile("bndcn (0x12345678), %bnd0");
100d8b167f9SMatt Fleming 	asm volatile("bndcn (%rax), %bnd3");
101d8b167f9SMatt Fleming 	asm volatile("bndcn (%rcx,%rax,1), %bnd0");
102d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(,%rax,1), %bnd0");
103d8b167f9SMatt Fleming 	asm volatile("bndcn (%rax,%rcx,1), %bnd0");
104d8b167f9SMatt Fleming 	asm volatile("bndcn (%rax,%rcx,8), %bnd0");
105d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rax), %bnd0");
106d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rbp), %bnd0");
107d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rcx,%rax,1), %bnd0");
108d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rbp,%rax,1), %bnd0");
109d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rax,%rcx,1), %bnd0");
110d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%rax,%rcx,8), %bnd0");
111d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rax), %bnd0");
112d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rbp), %bnd0");
113d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rcx,%rax,1), %bnd0");
114d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rbp,%rax,1), %bnd0");
115d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rax,%rcx,1), %bnd0");
116d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%rax,%rcx,8), %bnd0");
117d8b167f9SMatt Fleming 	asm volatile("bndcn %rax, %bnd0");
118d8b167f9SMatt Fleming 
119d8b167f9SMatt Fleming 	/* bndmov m128, bnd */
120d8b167f9SMatt Fleming 
121d8b167f9SMatt Fleming 	asm volatile("bndmov (%rax), %bnd0");
122d8b167f9SMatt Fleming 	asm volatile("bndmov (%r8), %bnd0");
123d8b167f9SMatt Fleming 	asm volatile("bndmov (0x12345678), %bnd0");
124d8b167f9SMatt Fleming 	asm volatile("bndmov (%rax), %bnd3");
125d8b167f9SMatt Fleming 	asm volatile("bndmov (%rcx,%rax,1), %bnd0");
126d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(,%rax,1), %bnd0");
127d8b167f9SMatt Fleming 	asm volatile("bndmov (%rax,%rcx,1), %bnd0");
128d8b167f9SMatt Fleming 	asm volatile("bndmov (%rax,%rcx,8), %bnd0");
129d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rax), %bnd0");
130d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rbp), %bnd0");
131d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rcx,%rax,1), %bnd0");
132d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rbp,%rax,1), %bnd0");
133d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rax,%rcx,1), %bnd0");
134d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%rax,%rcx,8), %bnd0");
135d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rax), %bnd0");
136d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rbp), %bnd0");
137d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rcx,%rax,1), %bnd0");
138d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rbp,%rax,1), %bnd0");
139d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rax,%rcx,1), %bnd0");
140d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%rax,%rcx,8), %bnd0");
141d8b167f9SMatt Fleming 
142d8b167f9SMatt Fleming 	/* bndmov bnd, m128 */
143d8b167f9SMatt Fleming 
144d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%rax)");
145d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%r8)");
146d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (0x12345678)");
147d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd3, (%rax)");
148d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%rcx,%rax,1)");
149d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(,%rax,1)");
150d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%rax,%rcx,1)");
151d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%rax,%rcx,8)");
152d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rax)");
153d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rbp)");
154d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rcx,%rax,1)");
155d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rbp,%rax,1)");
156d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,1)");
157d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,8)");
158d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rax)");
159d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rbp)");
160d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rcx,%rax,1)");
161d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rbp,%rax,1)");
162d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,1)");
163d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,8)");
164d8b167f9SMatt Fleming 
165d8b167f9SMatt Fleming 	/* bndmov bnd2, bnd1 */
166d8b167f9SMatt Fleming 
167d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, %bnd1");
168d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd1, %bnd0");
169d8b167f9SMatt Fleming 
170d8b167f9SMatt Fleming 	/* bndldx mib, bnd */
171d8b167f9SMatt Fleming 
172d8b167f9SMatt Fleming 	asm volatile("bndldx (%rax), %bnd0");
173d8b167f9SMatt Fleming 	asm volatile("bndldx (%r8), %bnd0");
174d8b167f9SMatt Fleming 	asm volatile("bndldx (0x12345678), %bnd0");
175d8b167f9SMatt Fleming 	asm volatile("bndldx (%rax), %bnd3");
176d8b167f9SMatt Fleming 	asm volatile("bndldx (%rcx,%rax,1), %bnd0");
177d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(,%rax,1), %bnd0");
178d8b167f9SMatt Fleming 	asm volatile("bndldx (%rax,%rcx,1), %bnd0");
179d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%rax), %bnd0");
180d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%rbp), %bnd0");
181d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%rcx,%rax,1), %bnd0");
182d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%rbp,%rax,1), %bnd0");
183d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%rax,%rcx,1), %bnd0");
184d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%rax), %bnd0");
185d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%rbp), %bnd0");
186d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%rcx,%rax,1), %bnd0");
187d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%rbp,%rax,1), %bnd0");
188d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%rax,%rcx,1), %bnd0");
189d8b167f9SMatt Fleming 
190d8b167f9SMatt Fleming 	/* bndstx bnd, mib */
191d8b167f9SMatt Fleming 
192d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%rax)");
193d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%r8)");
194d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (0x12345678)");
195d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd3, (%rax)");
196d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%rcx,%rax,1)");
197d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(,%rax,1)");
198d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%rax,%rcx,1)");
199d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%rax)");
200d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%rbp)");
201d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%rcx,%rax,1)");
202d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%rbp,%rax,1)");
203d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%rax,%rcx,1)");
204d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%rax)");
205d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%rbp)");
206d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%rcx,%rax,1)");
207d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%rbp,%rax,1)");
208d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%rax,%rcx,1)");
209d8b167f9SMatt Fleming 
210d8b167f9SMatt Fleming 	/* bnd prefix on call, ret, jmp and all jcc */
211d8b167f9SMatt Fleming 
212d8b167f9SMatt Fleming 	asm volatile("bnd call label1");  /* Expecting: call unconditional 0 */
213d8b167f9SMatt Fleming 	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect      0 */
214d8b167f9SMatt Fleming 	asm volatile("bnd ret");          /* Expecting: ret  indirect      0 */
215d8b167f9SMatt Fleming 	asm volatile("bnd jmp label1");   /* Expecting: jmp  unconditional 0 */
216d8b167f9SMatt Fleming 	asm volatile("bnd jmp label1");   /* Expecting: jmp  unconditional 0 */
217d8b167f9SMatt Fleming 	asm volatile("bnd jmp *(%ecx)");  /* Expecting: jmp  indirect      0 */
218d8b167f9SMatt Fleming 	asm volatile("bnd jne label1");   /* Expecting: jcc  conditional   0 */
219d8b167f9SMatt Fleming 
220d8b167f9SMatt Fleming 	/* sha1rnds4 imm8, xmm2/m128, xmm1 */
221d8b167f9SMatt Fleming 
222d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
223d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
224d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, %xmm8, %xmm0");
225d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm8");
226d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, %xmm15, %xmm8");
227d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%rax), %xmm0");
228d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%r8), %xmm0");
229d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
230d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%rax), %xmm3");
231d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%rcx,%rax,1), %xmm0");
232d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(,%rax,1), %xmm0");
233d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%rax,%rcx,1), %xmm0");
234d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%rax,%rcx,8), %xmm0");
235d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rax), %xmm0");
236d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rbp), %xmm0");
237d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rcx,%rax,1), %xmm0");
238d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rbp,%rax,1), %xmm0");
239d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,1), %xmm0");
240d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,8), %xmm0");
241d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax), %xmm0");
242d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp), %xmm0");
243d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rcx,%rax,1), %xmm0");
244d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp,%rax,1), %xmm0");
245d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,1), %xmm0");
246d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm0");
247d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm15");
248d8b167f9SMatt Fleming 
249d8b167f9SMatt Fleming 	/* sha1nexte xmm2/m128, xmm1 */
250d8b167f9SMatt Fleming 
251d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm1, %xmm0");
252d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm7, %xmm2");
253d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm8, %xmm0");
254d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm7, %xmm8");
255d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm15, %xmm8");
256d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%rax), %xmm0");
257d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%r8), %xmm0");
258d8b167f9SMatt Fleming 	asm volatile("sha1nexte (0x12345678), %xmm0");
259d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%rax), %xmm3");
260d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%rcx,%rax,1), %xmm0");
261d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(,%rax,1), %xmm0");
262d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%rax,%rcx,1), %xmm0");
263d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%rax,%rcx,8), %xmm0");
264d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rax), %xmm0");
265d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rbp), %xmm0");
266d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rcx,%rax,1), %xmm0");
267d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rbp,%rax,1), %xmm0");
268d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rax,%rcx,1), %xmm0");
269d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%rax,%rcx,8), %xmm0");
270d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rax), %xmm0");
271d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rbp), %xmm0");
272d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rcx,%rax,1), %xmm0");
273d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rbp,%rax,1), %xmm0");
274d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rax,%rcx,1), %xmm0");
275d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm0");
276d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm15");
277d8b167f9SMatt Fleming 
278d8b167f9SMatt Fleming 	/* sha1msg1 xmm2/m128, xmm1 */
279d8b167f9SMatt Fleming 
280d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm1, %xmm0");
281d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm7, %xmm2");
282d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm8, %xmm0");
283d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm7, %xmm8");
284d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm15, %xmm8");
285d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%rax), %xmm0");
286d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%r8), %xmm0");
287d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (0x12345678), %xmm0");
288d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%rax), %xmm3");
289d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%rcx,%rax,1), %xmm0");
290d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(,%rax,1), %xmm0");
291d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%rax,%rcx,1), %xmm0");
292d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%rax,%rcx,8), %xmm0");
293d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rax), %xmm0");
294d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rbp), %xmm0");
295d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rcx,%rax,1), %xmm0");
296d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rbp,%rax,1), %xmm0");
297d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rax,%rcx,1), %xmm0");
298d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%rax,%rcx,8), %xmm0");
299d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rax), %xmm0");
300d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rbp), %xmm0");
301d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rcx,%rax,1), %xmm0");
302d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rbp,%rax,1), %xmm0");
303d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rax,%rcx,1), %xmm0");
304d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm0");
305d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm15");
306d8b167f9SMatt Fleming 
307d8b167f9SMatt Fleming 	/* sha1msg2 xmm2/m128, xmm1 */
308d8b167f9SMatt Fleming 
309d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm1, %xmm0");
310d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm7, %xmm2");
311d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm8, %xmm0");
312d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm7, %xmm8");
313d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm15, %xmm8");
314d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%rax), %xmm0");
315d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%r8), %xmm0");
316d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (0x12345678), %xmm0");
317d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%rax), %xmm3");
318d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%rcx,%rax,1), %xmm0");
319d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(,%rax,1), %xmm0");
320d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%rax,%rcx,1), %xmm0");
321d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%rax,%rcx,8), %xmm0");
322d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rax), %xmm0");
323d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rbp), %xmm0");
324d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rcx,%rax,1), %xmm0");
325d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rbp,%rax,1), %xmm0");
326d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rax,%rcx,1), %xmm0");
327d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%rax,%rcx,8), %xmm0");
328d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rax), %xmm0");
329d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rbp), %xmm0");
330d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rcx,%rax,1), %xmm0");
331d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rbp,%rax,1), %xmm0");
332d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rax,%rcx,1), %xmm0");
333d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm0");
334d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm15");
335d8b167f9SMatt Fleming 
336d8b167f9SMatt Fleming 	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
337d8b167f9SMatt Fleming 	/* Note sha256rnds2 has an implicit operand 'xmm0' */
338d8b167f9SMatt Fleming 
339d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm4, %xmm1");
340d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm7, %xmm2");
341d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm8, %xmm1");
342d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm7, %xmm8");
343d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm15, %xmm8");
344d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%rax), %xmm1");
345d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%r8), %xmm1");
346d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (0x12345678), %xmm1");
347d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%rax), %xmm3");
348d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%rcx,%rax,1), %xmm1");
349d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(,%rax,1), %xmm1");
350d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%rax,%rcx,1), %xmm1");
351d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%rax,%rcx,8), %xmm1");
352d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rax), %xmm1");
353d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rbp), %xmm1");
354d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rcx,%rax,1), %xmm1");
355d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rbp,%rax,1), %xmm1");
356d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rax,%rcx,1), %xmm1");
357d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%rax,%rcx,8), %xmm1");
358d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rax), %xmm1");
359d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rbp), %xmm1");
360d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rcx,%rax,1), %xmm1");
361d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rbp,%rax,1), %xmm1");
362d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,1), %xmm1");
363d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm1");
364d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm15");
365d8b167f9SMatt Fleming 
366d8b167f9SMatt Fleming 	/* sha256msg1 xmm2/m128, xmm1 */
367d8b167f9SMatt Fleming 
368d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm1, %xmm0");
369d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm7, %xmm2");
370d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm8, %xmm0");
371d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm7, %xmm8");
372d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm15, %xmm8");
373d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%rax), %xmm0");
374d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%r8), %xmm0");
375d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (0x12345678), %xmm0");
376d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%rax), %xmm3");
377d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%rcx,%rax,1), %xmm0");
378d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(,%rax,1), %xmm0");
379d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%rax,%rcx,1), %xmm0");
380d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%rax,%rcx,8), %xmm0");
381d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rax), %xmm0");
382d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rbp), %xmm0");
383d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rcx,%rax,1), %xmm0");
384d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rbp,%rax,1), %xmm0");
385d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rax,%rcx,1), %xmm0");
386d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%rax,%rcx,8), %xmm0");
387d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rax), %xmm0");
388d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rbp), %xmm0");
389d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rcx,%rax,1), %xmm0");
390d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rbp,%rax,1), %xmm0");
391d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rax,%rcx,1), %xmm0");
392d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm0");
393d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm15");
394d8b167f9SMatt Fleming 
395d8b167f9SMatt Fleming 	/* sha256msg2 xmm2/m128, xmm1 */
396d8b167f9SMatt Fleming 
397d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm1, %xmm0");
398d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm7, %xmm2");
399d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm8, %xmm0");
400d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm7, %xmm8");
401d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm15, %xmm8");
402d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%rax), %xmm0");
403d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%r8), %xmm0");
404d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (0x12345678), %xmm0");
405d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%rax), %xmm3");
406d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%rcx,%rax,1), %xmm0");
407d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(,%rax,1), %xmm0");
408d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%rax,%rcx,1), %xmm0");
409d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%rax,%rcx,8), %xmm0");
410d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rax), %xmm0");
411d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rbp), %xmm0");
412d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rcx,%rax,1), %xmm0");
413d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rbp,%rax,1), %xmm0");
414d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rax,%rcx,1), %xmm0");
415d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%rax,%rcx,8), %xmm0");
416d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rax), %xmm0");
417d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rbp), %xmm0");
418d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rcx,%rax,1), %xmm0");
419d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rbp,%rax,1), %xmm0");
420d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rax,%rcx,1), %xmm0");
421d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm0");
422d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm15");
423d8b167f9SMatt Fleming 
424d8b167f9SMatt Fleming 	/* clflushopt m8 */
425d8b167f9SMatt Fleming 
426d8b167f9SMatt Fleming 	asm volatile("clflushopt (%rax)");
427d8b167f9SMatt Fleming 	asm volatile("clflushopt (%r8)");
428d8b167f9SMatt Fleming 	asm volatile("clflushopt (0x12345678)");
429d8b167f9SMatt Fleming 	asm volatile("clflushopt 0x12345678(%rax,%rcx,8)");
430d8b167f9SMatt Fleming 	asm volatile("clflushopt 0x12345678(%r8,%rcx,8)");
431d8b167f9SMatt Fleming 	/* Also check instructions in the same group encoding as clflushopt */
432d8b167f9SMatt Fleming 	asm volatile("clflush (%rax)");
433d8b167f9SMatt Fleming 	asm volatile("clflush (%r8)");
434d8b167f9SMatt Fleming 	asm volatile("sfence");
435d8b167f9SMatt Fleming 
436d8b167f9SMatt Fleming 	/* clwb m8 */
437d8b167f9SMatt Fleming 
438d8b167f9SMatt Fleming 	asm volatile("clwb (%rax)");
439d8b167f9SMatt Fleming 	asm volatile("clwb (%r8)");
440d8b167f9SMatt Fleming 	asm volatile("clwb (0x12345678)");
441d8b167f9SMatt Fleming 	asm volatile("clwb 0x12345678(%rax,%rcx,8)");
442d8b167f9SMatt Fleming 	asm volatile("clwb 0x12345678(%r8,%rcx,8)");
443d8b167f9SMatt Fleming 	/* Also check instructions in the same group encoding as clwb */
444d8b167f9SMatt Fleming 	asm volatile("xsaveopt (%rax)");
445d8b167f9SMatt Fleming 	asm volatile("xsaveopt (%r8)");
446d8b167f9SMatt Fleming 	asm volatile("mfence");
447d8b167f9SMatt Fleming 
448d8b167f9SMatt Fleming 	/* xsavec mem */
449d8b167f9SMatt Fleming 
450d8b167f9SMatt Fleming 	asm volatile("xsavec (%rax)");
451d8b167f9SMatt Fleming 	asm volatile("xsavec (%r8)");
452d8b167f9SMatt Fleming 	asm volatile("xsavec (0x12345678)");
453d8b167f9SMatt Fleming 	asm volatile("xsavec 0x12345678(%rax,%rcx,8)");
454d8b167f9SMatt Fleming 	asm volatile("xsavec 0x12345678(%r8,%rcx,8)");
455d8b167f9SMatt Fleming 
456d8b167f9SMatt Fleming 	/* xsaves mem */
457d8b167f9SMatt Fleming 
458d8b167f9SMatt Fleming 	asm volatile("xsaves (%rax)");
459d8b167f9SMatt Fleming 	asm volatile("xsaves (%r8)");
460d8b167f9SMatt Fleming 	asm volatile("xsaves (0x12345678)");
461d8b167f9SMatt Fleming 	asm volatile("xsaves 0x12345678(%rax,%rcx,8)");
462d8b167f9SMatt Fleming 	asm volatile("xsaves 0x12345678(%r8,%rcx,8)");
463d8b167f9SMatt Fleming 
464d8b167f9SMatt Fleming 	/* xrstors mem */
465d8b167f9SMatt Fleming 
466d8b167f9SMatt Fleming 	asm volatile("xrstors (%rax)");
467d8b167f9SMatt Fleming 	asm volatile("xrstors (%r8)");
468d8b167f9SMatt Fleming 	asm volatile("xrstors (0x12345678)");
469d8b167f9SMatt Fleming 	asm volatile("xrstors 0x12345678(%rax,%rcx,8)");
470d8b167f9SMatt Fleming 	asm volatile("xrstors 0x12345678(%r8,%rcx,8)");
471d8b167f9SMatt Fleming 
472d8b167f9SMatt Fleming #else  /* #ifdef __x86_64__ */
473d8b167f9SMatt Fleming 
474d8b167f9SMatt Fleming 	/* bndmk m32, bnd */
475d8b167f9SMatt Fleming 
476d8b167f9SMatt Fleming 	asm volatile("bndmk (%eax), %bnd0");
477d8b167f9SMatt Fleming 	asm volatile("bndmk (0x12345678), %bnd0");
478d8b167f9SMatt Fleming 	asm volatile("bndmk (%eax), %bnd3");
479d8b167f9SMatt Fleming 	asm volatile("bndmk (%ecx,%eax,1), %bnd0");
480d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(,%eax,1), %bnd0");
481d8b167f9SMatt Fleming 	asm volatile("bndmk (%eax,%ecx,1), %bnd0");
482d8b167f9SMatt Fleming 	asm volatile("bndmk (%eax,%ecx,8), %bnd0");
483d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%eax), %bnd0");
484d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%ebp), %bnd0");
485d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%ecx,%eax,1), %bnd0");
486d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%ebp,%eax,1), %bnd0");
487d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%eax,%ecx,1), %bnd0");
488d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12(%eax,%ecx,8), %bnd0");
489d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%eax), %bnd0");
490d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%ebp), %bnd0");
491d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%ecx,%eax,1), %bnd0");
492d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%ebp,%eax,1), %bnd0");
493d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%eax,%ecx,1), %bnd0");
494d8b167f9SMatt Fleming 	asm volatile("bndmk 0x12345678(%eax,%ecx,8), %bnd0");
495d8b167f9SMatt Fleming 
496d8b167f9SMatt Fleming 	/* bndcl r/m32, bnd */
497d8b167f9SMatt Fleming 
498d8b167f9SMatt Fleming 	asm volatile("bndcl (%eax), %bnd0");
499d8b167f9SMatt Fleming 	asm volatile("bndcl (0x12345678), %bnd0");
500d8b167f9SMatt Fleming 	asm volatile("bndcl (%eax), %bnd3");
501d8b167f9SMatt Fleming 	asm volatile("bndcl (%ecx,%eax,1), %bnd0");
502d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(,%eax,1), %bnd0");
503d8b167f9SMatt Fleming 	asm volatile("bndcl (%eax,%ecx,1), %bnd0");
504d8b167f9SMatt Fleming 	asm volatile("bndcl (%eax,%ecx,8), %bnd0");
505d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%eax), %bnd0");
506d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%ebp), %bnd0");
507d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%ecx,%eax,1), %bnd0");
508d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%ebp,%eax,1), %bnd0");
509d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%eax,%ecx,1), %bnd0");
510d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12(%eax,%ecx,8), %bnd0");
511d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%eax), %bnd0");
512d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%ebp), %bnd0");
513d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%ecx,%eax,1), %bnd0");
514d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%ebp,%eax,1), %bnd0");
515d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%eax,%ecx,1), %bnd0");
516d8b167f9SMatt Fleming 	asm volatile("bndcl 0x12345678(%eax,%ecx,8), %bnd0");
517d8b167f9SMatt Fleming 	asm volatile("bndcl %eax, %bnd0");
518d8b167f9SMatt Fleming 
519d8b167f9SMatt Fleming 	/* bndcu r/m32, bnd */
520d8b167f9SMatt Fleming 
521d8b167f9SMatt Fleming 	asm volatile("bndcu (%eax), %bnd0");
522d8b167f9SMatt Fleming 	asm volatile("bndcu (0x12345678), %bnd0");
523d8b167f9SMatt Fleming 	asm volatile("bndcu (%eax), %bnd3");
524d8b167f9SMatt Fleming 	asm volatile("bndcu (%ecx,%eax,1), %bnd0");
525d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(,%eax,1), %bnd0");
526d8b167f9SMatt Fleming 	asm volatile("bndcu (%eax,%ecx,1), %bnd0");
527d8b167f9SMatt Fleming 	asm volatile("bndcu (%eax,%ecx,8), %bnd0");
528d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%eax), %bnd0");
529d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%ebp), %bnd0");
530d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%ecx,%eax,1), %bnd0");
531d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%ebp,%eax,1), %bnd0");
532d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%eax,%ecx,1), %bnd0");
533d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12(%eax,%ecx,8), %bnd0");
534d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%eax), %bnd0");
535d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%ebp), %bnd0");
536d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%ecx,%eax,1), %bnd0");
537d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%ebp,%eax,1), %bnd0");
538d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%eax,%ecx,1), %bnd0");
539d8b167f9SMatt Fleming 	asm volatile("bndcu 0x12345678(%eax,%ecx,8), %bnd0");
540d8b167f9SMatt Fleming 	asm volatile("bndcu %eax, %bnd0");
541d8b167f9SMatt Fleming 
542d8b167f9SMatt Fleming 	/* bndcn r/m32, bnd */
543d8b167f9SMatt Fleming 
544d8b167f9SMatt Fleming 	asm volatile("bndcn (%eax), %bnd0");
545d8b167f9SMatt Fleming 	asm volatile("bndcn (0x12345678), %bnd0");
546d8b167f9SMatt Fleming 	asm volatile("bndcn (%eax), %bnd3");
547d8b167f9SMatt Fleming 	asm volatile("bndcn (%ecx,%eax,1), %bnd0");
548d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(,%eax,1), %bnd0");
549d8b167f9SMatt Fleming 	asm volatile("bndcn (%eax,%ecx,1), %bnd0");
550d8b167f9SMatt Fleming 	asm volatile("bndcn (%eax,%ecx,8), %bnd0");
551d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%eax), %bnd0");
552d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%ebp), %bnd0");
553d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%ecx,%eax,1), %bnd0");
554d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%ebp,%eax,1), %bnd0");
555d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%eax,%ecx,1), %bnd0");
556d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12(%eax,%ecx,8), %bnd0");
557d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%eax), %bnd0");
558d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%ebp), %bnd0");
559d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%ecx,%eax,1), %bnd0");
560d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%ebp,%eax,1), %bnd0");
561d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%eax,%ecx,1), %bnd0");
562d8b167f9SMatt Fleming 	asm volatile("bndcn 0x12345678(%eax,%ecx,8), %bnd0");
563d8b167f9SMatt Fleming 	asm volatile("bndcn %eax, %bnd0");
564d8b167f9SMatt Fleming 
565d8b167f9SMatt Fleming 	/* bndmov m64, bnd */
566d8b167f9SMatt Fleming 
567d8b167f9SMatt Fleming 	asm volatile("bndmov (%eax), %bnd0");
568d8b167f9SMatt Fleming 	asm volatile("bndmov (0x12345678), %bnd0");
569d8b167f9SMatt Fleming 	asm volatile("bndmov (%eax), %bnd3");
570d8b167f9SMatt Fleming 	asm volatile("bndmov (%ecx,%eax,1), %bnd0");
571d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(,%eax,1), %bnd0");
572d8b167f9SMatt Fleming 	asm volatile("bndmov (%eax,%ecx,1), %bnd0");
573d8b167f9SMatt Fleming 	asm volatile("bndmov (%eax,%ecx,8), %bnd0");
574d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%eax), %bnd0");
575d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%ebp), %bnd0");
576d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%ecx,%eax,1), %bnd0");
577d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%ebp,%eax,1), %bnd0");
578d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%eax,%ecx,1), %bnd0");
579d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12(%eax,%ecx,8), %bnd0");
580d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%eax), %bnd0");
581d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%ebp), %bnd0");
582d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%ecx,%eax,1), %bnd0");
583d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%ebp,%eax,1), %bnd0");
584d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%eax,%ecx,1), %bnd0");
585d8b167f9SMatt Fleming 	asm volatile("bndmov 0x12345678(%eax,%ecx,8), %bnd0");
586d8b167f9SMatt Fleming 
587d8b167f9SMatt Fleming 	/* bndmov bnd, m64 */
588d8b167f9SMatt Fleming 
589d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%eax)");
590d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (0x12345678)");
591d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd3, (%eax)");
592d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%ecx,%eax,1)");
593d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(,%eax,1)");
594d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%eax,%ecx,1)");
595d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, (%eax,%ecx,8)");
596d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%eax)");
597d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%ebp)");
598d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%ecx,%eax,1)");
599d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%ebp,%eax,1)");
600d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,1)");
601d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,8)");
602d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%eax)");
603d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%ebp)");
604d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%ecx,%eax,1)");
605d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%ebp,%eax,1)");
606d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,1)");
607d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,8)");
608d8b167f9SMatt Fleming 
609d8b167f9SMatt Fleming 	/* bndmov bnd2, bnd1 */
610d8b167f9SMatt Fleming 
611d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd0, %bnd1");
612d8b167f9SMatt Fleming 	asm volatile("bndmov %bnd1, %bnd0");
613d8b167f9SMatt Fleming 
614d8b167f9SMatt Fleming 	/* bndldx mib, bnd */
615d8b167f9SMatt Fleming 
616d8b167f9SMatt Fleming 	asm volatile("bndldx (%eax), %bnd0");
617d8b167f9SMatt Fleming 	asm volatile("bndldx (0x12345678), %bnd0");
618d8b167f9SMatt Fleming 	asm volatile("bndldx (%eax), %bnd3");
619d8b167f9SMatt Fleming 	asm volatile("bndldx (%ecx,%eax,1), %bnd0");
620d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(,%eax,1), %bnd0");
621d8b167f9SMatt Fleming 	asm volatile("bndldx (%eax,%ecx,1), %bnd0");
622d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%eax), %bnd0");
623d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%ebp), %bnd0");
624d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%ecx,%eax,1), %bnd0");
625d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%ebp,%eax,1), %bnd0");
626d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12(%eax,%ecx,1), %bnd0");
627d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%eax), %bnd0");
628d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%ebp), %bnd0");
629d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%ecx,%eax,1), %bnd0");
630d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%ebp,%eax,1), %bnd0");
631d8b167f9SMatt Fleming 	asm volatile("bndldx 0x12345678(%eax,%ecx,1), %bnd0");
632d8b167f9SMatt Fleming 
633d8b167f9SMatt Fleming 	/* bndstx bnd, mib */
634d8b167f9SMatt Fleming 
635d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%eax)");
636d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (0x12345678)");
637d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd3, (%eax)");
638d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%ecx,%eax,1)");
639d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(,%eax,1)");
640d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, (%eax,%ecx,1)");
641d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%eax)");
642d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%ebp)");
643d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%ecx,%eax,1)");
644d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%ebp,%eax,1)");
645d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12(%eax,%ecx,1)");
646d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%eax)");
647d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%ebp)");
648d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%ecx,%eax,1)");
649d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%ebp,%eax,1)");
650d8b167f9SMatt Fleming 	asm volatile("bndstx %bnd0, 0x12345678(%eax,%ecx,1)");
651d8b167f9SMatt Fleming 
652d8b167f9SMatt Fleming 	/* bnd prefix on call, ret, jmp and all jcc */
653d8b167f9SMatt Fleming 
654d8b167f9SMatt Fleming 	asm volatile("bnd call label1");  /* Expecting: call unconditional 0xfffffffc */
655d8b167f9SMatt Fleming 	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect      0 */
656d8b167f9SMatt Fleming 	asm volatile("bnd ret");          /* Expecting: ret  indirect      0 */
657d8b167f9SMatt Fleming 	asm volatile("bnd jmp label1");   /* Expecting: jmp  unconditional 0xfffffffc */
658d8b167f9SMatt Fleming 	asm volatile("bnd jmp label1");   /* Expecting: jmp  unconditional 0xfffffffc */
659d8b167f9SMatt Fleming 	asm volatile("bnd jmp *(%ecx)");  /* Expecting: jmp  indirect      0 */
660d8b167f9SMatt Fleming 	asm volatile("bnd jne label1");   /* Expecting: jcc  conditional   0xfffffffc */
661d8b167f9SMatt Fleming 
662d8b167f9SMatt Fleming 	/* sha1rnds4 imm8, xmm2/m128, xmm1 */
663d8b167f9SMatt Fleming 
664d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
665d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
666d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%eax), %xmm0");
667d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
668d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%eax), %xmm3");
669d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%ecx,%eax,1), %xmm0");
670d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(,%eax,1), %xmm0");
671d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,1), %xmm0");
672d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,8), %xmm0");
673d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%eax), %xmm0");
674d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%ebp), %xmm0");
675d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%ecx,%eax,1), %xmm0");
676d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%ebp,%eax,1), %xmm0");
677d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,1), %xmm0");
678d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,8), %xmm0");
679d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax), %xmm0");
680d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp), %xmm0");
681d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%ecx,%eax,1), %xmm0");
682d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp,%eax,1), %xmm0");
683d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,1), %xmm0");
684d8b167f9SMatt Fleming 	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,8), %xmm0");
685d8b167f9SMatt Fleming 
686d8b167f9SMatt Fleming 	/* sha1nexte xmm2/m128, xmm1 */
687d8b167f9SMatt Fleming 
688d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm1, %xmm0");
689d8b167f9SMatt Fleming 	asm volatile("sha1nexte %xmm7, %xmm2");
690d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%eax), %xmm0");
691d8b167f9SMatt Fleming 	asm volatile("sha1nexte (0x12345678), %xmm0");
692d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%eax), %xmm3");
693d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%ecx,%eax,1), %xmm0");
694d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(,%eax,1), %xmm0");
695d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%eax,%ecx,1), %xmm0");
696d8b167f9SMatt Fleming 	asm volatile("sha1nexte (%eax,%ecx,8), %xmm0");
697d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%eax), %xmm0");
698d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%ebp), %xmm0");
699d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%ecx,%eax,1), %xmm0");
700d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%ebp,%eax,1), %xmm0");
701d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%eax,%ecx,1), %xmm0");
702d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12(%eax,%ecx,8), %xmm0");
703d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%eax), %xmm0");
704d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%ebp), %xmm0");
705d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%ecx,%eax,1), %xmm0");
706d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%ebp,%eax,1), %xmm0");
707d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%eax,%ecx,1), %xmm0");
708d8b167f9SMatt Fleming 	asm volatile("sha1nexte 0x12345678(%eax,%ecx,8), %xmm0");
709d8b167f9SMatt Fleming 
710d8b167f9SMatt Fleming 	/* sha1msg1 xmm2/m128, xmm1 */
711d8b167f9SMatt Fleming 
712d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm1, %xmm0");
713d8b167f9SMatt Fleming 	asm volatile("sha1msg1 %xmm7, %xmm2");
714d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%eax), %xmm0");
715d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (0x12345678), %xmm0");
716d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%eax), %xmm3");
717d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%ecx,%eax,1), %xmm0");
718d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(,%eax,1), %xmm0");
719d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%eax,%ecx,1), %xmm0");
720d8b167f9SMatt Fleming 	asm volatile("sha1msg1 (%eax,%ecx,8), %xmm0");
721d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%eax), %xmm0");
722d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%ebp), %xmm0");
723d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%ecx,%eax,1), %xmm0");
724d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%ebp,%eax,1), %xmm0");
725d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%eax,%ecx,1), %xmm0");
726d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12(%eax,%ecx,8), %xmm0");
727d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%eax), %xmm0");
728d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%ebp), %xmm0");
729d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%ecx,%eax,1), %xmm0");
730d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%ebp,%eax,1), %xmm0");
731d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%eax,%ecx,1), %xmm0");
732d8b167f9SMatt Fleming 	asm volatile("sha1msg1 0x12345678(%eax,%ecx,8), %xmm0");
733d8b167f9SMatt Fleming 
734d8b167f9SMatt Fleming 	/* sha1msg2 xmm2/m128, xmm1 */
735d8b167f9SMatt Fleming 
736d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm1, %xmm0");
737d8b167f9SMatt Fleming 	asm volatile("sha1msg2 %xmm7, %xmm2");
738d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%eax), %xmm0");
739d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (0x12345678), %xmm0");
740d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%eax), %xmm3");
741d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%ecx,%eax,1), %xmm0");
742d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(,%eax,1), %xmm0");
743d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%eax,%ecx,1), %xmm0");
744d8b167f9SMatt Fleming 	asm volatile("sha1msg2 (%eax,%ecx,8), %xmm0");
745d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%eax), %xmm0");
746d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%ebp), %xmm0");
747d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%ecx,%eax,1), %xmm0");
748d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%ebp,%eax,1), %xmm0");
749d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%eax,%ecx,1), %xmm0");
750d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12(%eax,%ecx,8), %xmm0");
751d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%eax), %xmm0");
752d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%ebp), %xmm0");
753d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%ecx,%eax,1), %xmm0");
754d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%ebp,%eax,1), %xmm0");
755d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%eax,%ecx,1), %xmm0");
756d8b167f9SMatt Fleming 	asm volatile("sha1msg2 0x12345678(%eax,%ecx,8), %xmm0");
757d8b167f9SMatt Fleming 
758d8b167f9SMatt Fleming 	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
759d8b167f9SMatt Fleming 	/* Note sha256rnds2 has an implicit operand 'xmm0' */
760d8b167f9SMatt Fleming 
761d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm4, %xmm1");
762d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 %xmm7, %xmm2");
763d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%eax), %xmm1");
764d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (0x12345678), %xmm1");
765d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%eax), %xmm3");
766d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%ecx,%eax,1), %xmm1");
767d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(,%eax,1), %xmm1");
768d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%eax,%ecx,1), %xmm1");
769d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 (%eax,%ecx,8), %xmm1");
770d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%eax), %xmm1");
771d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%ebp), %xmm1");
772d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%ecx,%eax,1), %xmm1");
773d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%ebp,%eax,1), %xmm1");
774d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%eax,%ecx,1), %xmm1");
775d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12(%eax,%ecx,8), %xmm1");
776d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%eax), %xmm1");
777d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%ebp), %xmm1");
778d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%ecx,%eax,1), %xmm1");
779d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%ebp,%eax,1), %xmm1");
780d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,1), %xmm1");
781d8b167f9SMatt Fleming 	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,8), %xmm1");
782d8b167f9SMatt Fleming 
783d8b167f9SMatt Fleming 	/* sha256msg1 xmm2/m128, xmm1 */
784d8b167f9SMatt Fleming 
785d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm1, %xmm0");
786d8b167f9SMatt Fleming 	asm volatile("sha256msg1 %xmm7, %xmm2");
787d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%eax), %xmm0");
788d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (0x12345678), %xmm0");
789d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%eax), %xmm3");
790d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%ecx,%eax,1), %xmm0");
791d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(,%eax,1), %xmm0");
792d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%eax,%ecx,1), %xmm0");
793d8b167f9SMatt Fleming 	asm volatile("sha256msg1 (%eax,%ecx,8), %xmm0");
794d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%eax), %xmm0");
795d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%ebp), %xmm0");
796d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%ecx,%eax,1), %xmm0");
797d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%ebp,%eax,1), %xmm0");
798d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%eax,%ecx,1), %xmm0");
799d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12(%eax,%ecx,8), %xmm0");
800d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%eax), %xmm0");
801d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%ebp), %xmm0");
802d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%ecx,%eax,1), %xmm0");
803d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%ebp,%eax,1), %xmm0");
804d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%eax,%ecx,1), %xmm0");
805d8b167f9SMatt Fleming 	asm volatile("sha256msg1 0x12345678(%eax,%ecx,8), %xmm0");
806d8b167f9SMatt Fleming 
807d8b167f9SMatt Fleming 	/* sha256msg2 xmm2/m128, xmm1 */
808d8b167f9SMatt Fleming 
809d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm1, %xmm0");
810d8b167f9SMatt Fleming 	asm volatile("sha256msg2 %xmm7, %xmm2");
811d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%eax), %xmm0");
812d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (0x12345678), %xmm0");
813d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%eax), %xmm3");
814d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%ecx,%eax,1), %xmm0");
815d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(,%eax,1), %xmm0");
816d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%eax,%ecx,1), %xmm0");
817d8b167f9SMatt Fleming 	asm volatile("sha256msg2 (%eax,%ecx,8), %xmm0");
818d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%eax), %xmm0");
819d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%ebp), %xmm0");
820d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%ecx,%eax,1), %xmm0");
821d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%ebp,%eax,1), %xmm0");
822d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%eax,%ecx,1), %xmm0");
823d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12(%eax,%ecx,8), %xmm0");
824d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%eax), %xmm0");
825d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%ebp), %xmm0");
826d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%ecx,%eax,1), %xmm0");
827d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%ebp,%eax,1), %xmm0");
828d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%eax,%ecx,1), %xmm0");
829d8b167f9SMatt Fleming 	asm volatile("sha256msg2 0x12345678(%eax,%ecx,8), %xmm0");
830d8b167f9SMatt Fleming 
831d8b167f9SMatt Fleming 	/* clflushopt m8 */
832d8b167f9SMatt Fleming 
833d8b167f9SMatt Fleming 	asm volatile("clflushopt (%eax)");
834d8b167f9SMatt Fleming 	asm volatile("clflushopt (0x12345678)");
835d8b167f9SMatt Fleming 	asm volatile("clflushopt 0x12345678(%eax,%ecx,8)");
836d8b167f9SMatt Fleming 	/* Also check instructions in the same group encoding as clflushopt */
837d8b167f9SMatt Fleming 	asm volatile("clflush (%eax)");
838d8b167f9SMatt Fleming 	asm volatile("sfence");
839d8b167f9SMatt Fleming 
840d8b167f9SMatt Fleming 	/* clwb m8 */
841d8b167f9SMatt Fleming 
842d8b167f9SMatt Fleming 	asm volatile("clwb (%eax)");
843d8b167f9SMatt Fleming 	asm volatile("clwb (0x12345678)");
844d8b167f9SMatt Fleming 	asm volatile("clwb 0x12345678(%eax,%ecx,8)");
845d8b167f9SMatt Fleming 	/* Also check instructions in the same group encoding as clwb */
846d8b167f9SMatt Fleming 	asm volatile("xsaveopt (%eax)");
847d8b167f9SMatt Fleming 	asm volatile("mfence");
848d8b167f9SMatt Fleming 
849d8b167f9SMatt Fleming 	/* xsavec mem */
850d8b167f9SMatt Fleming 
851d8b167f9SMatt Fleming 	asm volatile("xsavec (%eax)");
852d8b167f9SMatt Fleming 	asm volatile("xsavec (0x12345678)");
853d8b167f9SMatt Fleming 	asm volatile("xsavec 0x12345678(%eax,%ecx,8)");
854d8b167f9SMatt Fleming 
855d8b167f9SMatt Fleming 	/* xsaves mem */
856d8b167f9SMatt Fleming 
857d8b167f9SMatt Fleming 	asm volatile("xsaves (%eax)");
858d8b167f9SMatt Fleming 	asm volatile("xsaves (0x12345678)");
859d8b167f9SMatt Fleming 	asm volatile("xsaves 0x12345678(%eax,%ecx,8)");
860d8b167f9SMatt Fleming 
861d8b167f9SMatt Fleming 	/* xrstors mem */
862d8b167f9SMatt Fleming 
863d8b167f9SMatt Fleming 	asm volatile("xrstors (%eax)");
864d8b167f9SMatt Fleming 	asm volatile("xrstors (0x12345678)");
865d8b167f9SMatt Fleming 	asm volatile("xrstors 0x12345678(%eax,%ecx,8)");
866d8b167f9SMatt Fleming 
867d8b167f9SMatt Fleming #endif /* #ifndef __x86_64__ */
868d8b167f9SMatt Fleming 
869d8b167f9SMatt Fleming 	/* pcommit */
870d8b167f9SMatt Fleming 
871d8b167f9SMatt Fleming 	asm volatile("pcommit");
872d8b167f9SMatt Fleming 
873d8b167f9SMatt Fleming 	/* Following line is a marker for the awk script - do not change */
874d8b167f9SMatt Fleming 	asm volatile("rdtsc"); /* Stop here */
875d8b167f9SMatt Fleming 
876d8b167f9SMatt Fleming 	return 0;
877d8b167f9SMatt Fleming }
878