#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads and stores around it, which can hurt performance.  The solution
 * is to use a variable and mimic reads and writes to it to enforce
 * serialization.
 */
extern unsigned long __force_order;

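/*
 * Illustrative note (an inference from the asm constraints below, not a
 * documented contract): the CRn read accessors write __force_order ("=m")
 * and the CRn write accessors read it ("m"), so a sequence such as
 *
 *	cr0 = native_read_cr0();		writes __force_order
 *	native_write_cr0(cr0 & ~X86_CR0_WP);	reads  __force_order
 *
 * carries a compiler-visible dependency and cannot be reordered, while
 * unrelated loads and stores around the pair may still move freely.
 */
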
static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

static inline unsigned long __native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * This could fault if CR4 does not exist.  Non-existent CR4
	 * is functionally equivalent to CR4 == 0.  Keep it simple and pretend
	 * that CR4 == 0 on CPUs that don't have CR4.
	 */
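	/*
	 * Descriptive note: _ASM_EXTABLE(1b, 2b) registers an exception
	 * fixup, so if the mov at label 1 faults, execution resumes at
	 * label 2; the "0" (0) constraint preloads %0 with 0, which then
	 * becomes the returned value.
	 */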
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
	return val;
}

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}

#ifdef CONFIG_X86_64
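/* CR8 provides access to the local APIC Task Priority Register (TPR). */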
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 __read_pkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction.  Places PKRU contents into EAX,
	 * clears EDX and requires that ecx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void __write_pkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction.  Loads the contents of EAX into PKRU;
	 * requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c"(ecx), "d"(edx));
}
#else
static inline u32 __read_pkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif
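
/*
 * Usage sketch (illustrative only; the bit arithmetic is the architectural
 * PKRU layout, the scenario is made up): PKRU holds two bits per protection
 * key, AD (access disable) at bit 2*pkey and WD (write disable) at
 * bit 2*pkey + 1.  Making pkey 1 read-only could look like:
 *
 *	u32 pkru = __read_pkru();
 *	pkru |= 1u << (2 * 1 + 1);	set WD for pkey 1
 *	__write_pkru(pkru);
 */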

static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

extern asmlinkage void native_load_gs_index(unsigned);

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

/*
 * Careful!  CR3 contains more than just an address.  You probably want
 * read_cr3_pa() instead.
 */
static inline unsigned long __read_cr3(void)
{
	return __native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

#endif /* CONFIG_PARAVIRT */

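/*
 * Note (a general caveat, not a guarantee made by this file): the cache
 * flush instructions below are weakly ordered, so callers typically fence
 * around them (e.g. with mfence or sfence) when ordering matters.
 */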
static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

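/*
 * Descriptive note: clflushopt() relies on the alternatives mechanism.
 * The kernel boots with a plain clflush, padded with a ds-prefix byte so
 * both encodings are the same length, and patches in the 0x66-prefixed
 * clflushopt encoding once X86_FEATURE_CLFLUSHOPT is detected.
 */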
static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

static inline void clwb(volatile void *__p)
{
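	/* A 64-byte dummy type so "+m" below covers the whole cache line. */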
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])", /* clflushopt (%%rax) */
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30",  /* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}
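
/*
 * Usage sketch (illustrative; flush_buffer() is a hypothetical helper and
 * a 64-byte cache line is assumed): persistent-memory style code flushes
 * a buffer one cache line at a time:
 *
 *	static void flush_buffer(void *addr, size_t size)
 *	{
 *		unsigned long p = (unsigned long)addr & ~63UL;
 *
 *		for (; p < (unsigned long)addr + size; p += 64)
 *			clwb((void *)p);
 *	}
 */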

#define nop() asm volatile ("nop")


#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */