xref: /openbmc/linux/arch/x86/include/asm/cmpxchg_64.h (revision a09d2831)
#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

/* Force the access through a volatile pointer so GCC cannot cache *ptr. */
#define __xg(x) ((volatile long *)(x))
/* On x86-64 an aligned 64-bit store is atomic, so a plain assignment suffices. */
static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit
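
/*
 * Illustrative usage, not part of the original header: publish a
 * 64-bit value with a single atomic store (the variable name is
 * hypothetical).
 *
 *	static volatile unsigned long last_seen;
 *
 *	set_64bit(&last_seen, 0x123456789abcdef0UL);
 */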

/*
 * Never defined anywhere: if an unsupported operand size reaches the
 * default: case below, the call survives to link time and the build
 * fails with an undefined reference.
 */
extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Note 2: xchg has a side effect, so the "volatile" attribute is
 *	  necessary; strictly speaking the asm is still imprecise, since
 *	  *ptr is really an output operand. --ANK
 */
#define __xchg(x, ptr, size)						\
({									\
	__typeof(*(ptr)) __x = (x);					\
	switch (size) {							\
	case 1:								\
		asm volatile("xchgb %b0,%1"				\
			     : "=q" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	case 2:								\
		asm volatile("xchgw %w0,%1"				\
			     : "=r" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	case 4:								\
		asm volatile("xchgl %k0,%1"				\
			     : "=r" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	case 8:								\
		asm volatile("xchgq %0,%1"				\
			     : "=r" (__x)				\
			     : "m" (*__xg(ptr)), "0" (__x)		\
			     : "memory");				\
		break;							\
	default:							\
		__xchg_wrong_size();					\
	}								\
	__x;								\
})

#define xchg(ptr, v)							\
	__xchg((v), (ptr), sizeof(*(ptr)))
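
/*
 * Illustrative usage, not part of the original header: claiming a
 * one-shot flag with xchg(), which is atomic even without a lock
 * prefix (names are hypothetical).
 *
 *	static unsigned long claimed;
 *
 *	if (xchg(&claimed, 1UL) == 0)
 *		do_one_time_setup();	(reached by exactly one caller)
 */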

#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange.  Compare OLD with MEM; if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case 1:								\
		asm volatile(lock "cmpxchgb %b1,%2"			\
			     : "=a"(__ret)				\
			     : "q"(__new), "m"(*__xg(ptr)), "0"(__old)	\
			     : "memory");				\
		break;							\
	case 2:								\
		asm volatile(lock "cmpxchgw %w1,%2"			\
			     : "=a"(__ret)				\
			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
			     : "memory");				\
		break;							\
	case 4:								\
		asm volatile(lock "cmpxchgl %k1,%2"			\
			     : "=a"(__ret)				\
			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
			     : "memory");				\
		break;							\
	case 8:								\
		asm volatile(lock "cmpxchgq %1,%2"			\
			     : "=a"(__ret)				\
			     : "r"(__new), "m"(*__xg(ptr)), "0"(__old)	\
			     : "memory");				\
		break;							\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})

/* LOCK_PREFIX is patched out at boot on non-SMP kernels. */
#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

/* Always locked, even on UP: for synchronizing with external agents. */
#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

/* No lock prefix: atomic only with respect to the local CPU. */
#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")

#define cmpxchg(ptr, old, new)						\
	__cmpxchg((ptr), (old), (new), sizeof(*(ptr)))

#define sync_cmpxchg(ptr, old, new)					\
	__sync_cmpxchg((ptr), (old), (new), sizeof(*(ptr)))

#define cmpxchg_local(ptr, old, new)					\
	__cmpxchg_local((ptr), (old), (new), sizeof(*(ptr)))

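/*
 * Illustrative usage, not part of the original header: the classic
 * cmpxchg() retry loop, here implementing a lock-free increment (the
 * counter is hypothetical).
 *
 *	static unsigned long counter;
 *	unsigned long old, new;
 *
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */
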
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
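
/*
 * Illustrative usage, not part of the original header: on x86-64
 * cmpxchg64() is plain cmpxchg() plus a compile-time size check, so
 * only 8-byte objects are accepted (variables are hypothetical).
 *
 *	u64 seq = 0;
 *	cmpxchg64(&seq, 0ULL, 1ULL);	fine, the operand is 8 bytes
 *
 *	u32 small = 0;
 *	cmpxchg64(&small, 0, 1);	rejected by BUILD_BUG_ON()
 */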

#endif /* _ASM_X86_CMPXCHG_64_H */