#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

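/*
 * On x86-64 a naturally aligned 64-bit store is atomic, so a plain
 * assignment is all set_64bit() needs; no cmpxchg loop is required as
 * on 32-bit.
 */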
static inline void set_64bit(volatile u64 *ptr, u64 val)
{
	*ptr = val;
}

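/*
 * These are deliberately never defined: a reference to one of them only
 * survives when __xchg()/__cmpxchg() is used on an unsupported operand
 * size, so the mistake shows up as a link-time error.
 */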
extern void __xchg_wrong_size(void);
extern void __cmpxchg_wrong_size(void);

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
#define __xchg(x, ptr, size)						\
({									\
	__typeof(*(ptr)) __x = (x);					\
	switch (size) {							\
	case 1:								\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile("xchgb %0,%1"				\
			     : "=q" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case 2:								\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile("xchgw %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case 4:								\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile("xchgl %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	case 8:								\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile("xchgq %0,%1"				\
			     : "=r" (__x), "+m" (*__ptr)		\
			     : "0" (__x)				\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__xchg_wrong_size();					\
	}								\
	__x;								\
})

#define xchg(ptr, v)							\
	__xchg((v), (ptr), sizeof(*(ptr)))

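/*
 * Usage sketch (hypothetical caller, not part of this header): hand a
 * new value over and observe the previous one in a single atomic step:
 *
 *	old_owner = xchg(&owner, new_owner);
 *
 * The implied lock also makes this a full memory barrier.
 */
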
#define __HAVE_ARCH_CMPXCHG 1

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case 1:								\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile(lock "cmpxchgb %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "q" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case 2:								\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile(lock "cmpxchgw %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case 4:								\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile(lock "cmpxchgl %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case 8:								\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile(lock "cmpxchgq %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})

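/*
 * Three flavours: __cmpxchg() takes LOCK_PREFIX (patched away on UP
 * kernels), __sync_cmpxchg() is always locked, and __cmpxchg_local()
 * is never locked and thus atomic only with respect to this CPU.
 */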
#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

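/*
 * Note the hard-coded "lock; " below: unlike LOCK_PREFIX, which the
 * alternatives code can patch out on UP kernels, it is always emitted.
 * sync_cmpxchg() must stay atomic with respect to agents other than
 * this CPU, e.g. memory shared with a hypervisor or a bus-mastering
 * device.
 */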
#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")

#define cmpxchg(ptr, old, new)						\
	__cmpxchg((ptr), (old), (new), sizeof(*(ptr)))

#define sync_cmpxchg(ptr, old, new)					\
	__sync_cmpxchg((ptr), (old), (new), sizeof(*(ptr)))

#define cmpxchg_local(ptr, old, new)					\
	__cmpxchg_local((ptr), (old), (new), sizeof(*(ptr)))

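/*
 * Usage sketch (hypothetical caller, not part of this header): the
 * classic lock-free read-modify-write loop, here on a u64 counter:
 *
 *	u64 old, cur = counter;
 *
 *	do {
 *		old = cur;
 *		cur = cmpxchg(&counter, old, old + 1);
 *	} while (cur != old);
 *
 * On failure cmpxchg() returns the value actually found in memory,
 * which becomes the next iteration's expected value.
 */

/*
 * On 64-bit, cmpxchg64() is simply cmpxchg() on a u64; the
 * BUILD_BUG_ON() only enforces that the operand really is 8 bytes wide.
 */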
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})

#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})

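/*
 * cmpxchg16b compares rdx:rax against the 16-byte memory operand; on a
 * match it stores rcx:rbx there, otherwise it loads the current
 * contents into rdx:rax.  ZF reports the outcome, which setz turns
 * into the macros' return value.
 */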
#define cmpxchg16b(ptr, o1, o2, n1, n2)				\
({								\
	char __ret;						\
	__typeof__(o2) __junk;					\
	__typeof__(*(ptr)) __old1 = (o1);			\
	__typeof__(o2) __old2 = (o2);				\
	__typeof__(*(ptr)) __new1 = (n1);			\
	__typeof__(o2) __new2 = (n2);				\
	asm volatile(LOCK_PREFIX "cmpxchg16b %2; setz %1"	\
		       : "=d" (__junk), "=a" (__ret),		\
			 "+m" (*(ptr))				\
		       : "b" (__new1), "c" (__new2),		\
			 "a" (__old1), "d" (__old2)		\
		       : "memory");				\
	__ret; })

#define cmpxchg16b_local(ptr, o1, o2, n1, n2)			\
({								\
	char __ret;						\
	__typeof__(o2) __junk;					\
	__typeof__(*(ptr)) __old1 = (o1);			\
	__typeof__(o2) __old2 = (o2);				\
	__typeof__(*(ptr)) __new1 = (n1);			\
	__typeof__(o2) __new2 = (n2);				\
	asm volatile("cmpxchg16b %2; setz %1"			\
		       : "=d" (__junk), "=a" (__ret),		\
			 "+m" (*(ptr))				\
		       : "b" (__new1), "c" (__new2),		\
			 "a" (__old1), "d" (__old2)		\
		       : "memory");				\
	__ret; })

#define cmpxchg_double(ptr, o1, o2, n1, n2)				\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	VM_BUG_ON((unsigned long)(ptr) % 16);				\
	cmpxchg16b((ptr), (o1), (o2), (n1), (n2));			\
})

#define cmpxchg_double_local(ptr, o1, o2, n1, n2)			\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	VM_BUG_ON((unsigned long)(ptr) % 16);				\
	cmpxchg16b_local((ptr), (o1), (o2), (n1), (n2));		\
})

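/*
 * Usage sketch (hypothetical caller, not part of this header), in the
 * style of a lockless {pointer, generation} pair update:
 *
 *	struct {
 *		void *head;
 *		unsigned long tid;
 *	} list __aligned(16);
 *
 *	if (cmpxchg_double(&list.head, old_head, old_tid,
 *			   new_head, new_tid))
 *		... both words matched and were replaced atomically.
 *
 * The first argument addresses the low word; the second word is the
 * one immediately after it in memory.
 */

/*
 * cmpxchg16b is not part of the original x86-64 baseline (the earliest
 * AMD64 CPUs lack it), so cmpxchg_double() users must gate on this
 * CX16 feature check first.
 */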
#define system_has_cmpxchg_double() cpu_has_cx16

#endif /* _ASM_X86_CMPXCHG_64_H */