#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H


#ifdef __KERNEL__

#include <asm/nops.h>

/* Clear CR0.TS (the "task switched" flag). */
static inline void native_clts(void)
{
	asm volatile("clts");
}

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance. The solution is to
 * use a variable and mimic reads and writes to it to enforce serialization.
 */
extern unsigned long __force_order;

static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
#ifdef CONFIG_X86_32
	/*
	 * This could fault if CR4 does not exist.  Non-existent CR4
	 * is functionally equivalent to CR4 == 0.  Keep it simple and pretend
	 * that CR4 == 0 on CPUs that don't have CR4.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	/* CR4 always exists on x86_64. */
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
#endif
	return val;
}

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}

#ifdef CONFIG_X86_64
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

#ifdef CONFIG_X86_INTEL_MEMORY_PROTECTION_KEYS
static inline u32 __read_pkru(void)
{
	u32 ecx = 0;
	u32 edx, pkru;

	/*
	 * "rdpkru" instruction.  Places PKRU contents into EAX,
	 * clears EDX and requires that ecx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xee\n\t"
		     : "=a" (pkru), "=d" (edx)
		     : "c" (ecx));
	return pkru;
}

static inline void __write_pkru(u32 pkru)
{
	u32 ecx = 0, edx = 0;

	/*
	 * "wrpkru" instruction.  Loads the contents of EAX into PKRU,
	 * requires that ecx = edx = 0.
	 */
	asm volatile(".byte 0x0f,0x01,0xef\n\t"
		     : : "a" (pkru), "c"(ecx), "d"(edx));
}
#else
static inline u32 __read_pkru(void)
{
	return 0;
}

static inline void __write_pkru(u32 pkru)
{
}
#endif
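
/*
 * Illustrative sketch, not part of this header: each protection key k
 * owns two PKRU bits, access-disable (AD) at bit 2*k and write-disable
 * (WD) at bit 2*k + 1.  Making key 5 read-only would look roughly like
 * setting its WD bit:
 *
 *	u32 pkru = __read_pkru();
 *	__write_pkru(pkru | (1u << (2 * 5 + 1)));
 */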

/* Write back and invalidate all cache lines in the whole cache hierarchy. */
static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

extern asmlinkage void native_load_gs_index(unsigned);

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

static inline unsigned long read_cr3(void)
{
	return native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline unsigned long __read_cr4(void)
{
	return native_read_cr4();
}

static inline void __write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

/* Clear the 'TS' bit */
static inline void clts(void)
{
	native_clts();
}

#endif /* CONFIG_PARAVIRT */

/* Set the 'TS' bit in CR0. */
#define stts() write_cr0(read_cr0() | X86_CR0_TS)

/* Flush the cache line containing the byte at *__p. */
static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

/* CLFLUSHOPT where available, falling back to plain CLFLUSH. */
static inline void clflushopt(volatile void *__p)
{
	alternative_io(".byte " __stringify(NOP_DS_PREFIX) "; clflush %P0",
		       ".byte 0x66; clflush %P0",
		       X86_FEATURE_CLFLUSHOPT,
		       "+m" (*(volatile char __force *)__p));
}

/* CLWB where available, falling back to CLFLUSHOPT, then CLFLUSH. */
static inline void clwb(volatile void *__p)
{
	volatile struct { char x[64]; } *p = __p;

	asm volatile(ALTERNATIVE_2(
		".byte " __stringify(NOP_DS_PREFIX) "; clflush (%[pax])",
		".byte 0x66; clflush (%[pax])",	/* clflushopt (%%rax) */
		X86_FEATURE_CLFLUSHOPT,
		".byte 0x66, 0x0f, 0xae, 0x30",	/* clwb (%%rax) */
		X86_FEATURE_CLWB)
		: [p] "+m" (*p)
		: [pax] "a" (p));
}

#define nop() asm volatile ("nop")


#endif /* __KERNEL__ */

#endif /* _ASM_X86_SPECIAL_INSNS_H */