#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

/* On x86-64 a naturally aligned 64-bit store is atomic, so a plain
 * write is all set_64bit() needs. */
static inline void set_64bit(volatile u64 *ptr, u64 val)
{
	*ptr = val;
}

#define __HAVE_ARCH_CMPXCHG 1

/* 64-bit cmpxchg is just the native word-size cmpxchg; the BUILD_BUG_ON
 * rejects operands that are not exactly 8 bytes wide. */
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})

/*
 * Atomically compare the 16 bytes at ptr with the pair {o1, o2} and,
 * if they match, replace them with {n1, n2}.  CMPXCHG16B expects the
 * old value in RDX:RAX and the new value in RCX:RBX; SETZ then turns
 * ZF into the result, so this evaluates to nonzero on success.
 */
#define cmpxchg16b(ptr, o1, o2, n1, n2)					\
({									\
	char __ret;							\
	__typeof__(o2) __junk;						\
	__typeof__(*(ptr)) __old1 = (o1);				\
	__typeof__(o2) __old2 = (o2);					\
	__typeof__(*(ptr)) __new1 = (n1);				\
	__typeof__(o2) __new2 = (n2);					\
	asm volatile(LOCK_PREFIX "cmpxchg16b %2;setz %1"		\
		     : "=d"(__junk), "=a"(__ret), "+m" (*(ptr))		\
		     : "b"(__new1), "c"(__new2),			\
		       "a"(__old1), "d"(__old2));			\
	__ret;								\
})

/*
 * Same as cmpxchg16b() but without the LOCK prefix: atomic only with
 * respect to the local CPU, not against other processors.
 */
#define cmpxchg16b_local(ptr, o1, o2, n1, n2)				\
({									\
	char __ret;							\
	__typeof__(o2) __junk;						\
	__typeof__(*(ptr)) __old1 = (o1);				\
	__typeof__(o2) __old2 = (o2);					\
	__typeof__(*(ptr)) __new1 = (n1);				\
	__typeof__(o2) __new2 = (n2);					\
	asm volatile("cmpxchg16b %2;setz %1"				\
		     : "=d"(__junk), "=a"(__ret), "+m" (*(ptr))		\
		     : "b"(__new1), "c"(__new2),			\
		       "a"(__old1), "d"(__old2));			\
	__ret;								\
})

/*
 * Double-word (2 x 64-bit) compare-and-exchange.  CMPXCHG16B requires
 * its memory operand to be 16-byte aligned, hence the VM_BUG_ON.
 */
#define cmpxchg_double(ptr, o1, o2, n1, n2)				\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	VM_BUG_ON((unsigned long)(ptr) % 16);				\
	cmpxchg16b((ptr), (o1), (o2), (n1), (n2));			\
})

#define cmpxchg_double_local(ptr, o1, o2, n1, n2)			\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	VM_BUG_ON((unsigned long)(ptr) % 16);				\
	cmpxchg16b_local((ptr), (o1), (o2), (n1), (n2));		\
})

/*
 * CMPXCHG16B is missing on some early x86-64 CPUs, so callers must
 * check for it at runtime.
 */
#define system_has_cmpxchg_double() cpu_has_cx16

#endif /* _ASM_X86_CMPXCHG_64_H */
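
/*
 * Illustrative usage sketch (hypothetical, not part of this header):
 * cmpxchg_double() updates a {pointer, tag} pair in one atomic step,
 * which is the classic way to build a lock-free stack whose tag word
 * defeats the ABA problem.  Every name below is an assumption made for
 * illustration; a real caller must first check
 * system_has_cmpxchg_double(), and node reclamation is elided.
 *
 *	struct ll_stack {
 *		void *top;			// first 8-byte word
 *		unsigned long tag;		// second 8-byte word
 *	} __attribute__((__aligned__(16)));	// CMPXCHG16B needs 16-byte alignment
 *
 *	struct ll_node {
 *		struct ll_node *next;
 *	};
 *
 *	static inline bool ll_push(struct ll_stack *s, struct ll_node *node)
 *	{
 *		struct ll_node *old = s->top;
 *		unsigned long tag = s->tag;
 *
 *		node->next = old;
 *		// Succeeds only if neither word changed since we read them;
 *		// bumping the tag makes a recycled pointer value harmless.
 *		return cmpxchg_double(&s->top, old, tag, node, tag + 1);
 *	}
 */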