xref: /openbmc/linux/arch/x86/include/asm/cmpxchg.h (revision febe950d)
1b2441318SGreg Kroah-Hartman /* SPDX-License-Identifier: GPL-2.0 */
2e9826380SJeremy Fitzhardinge #ifndef ASM_X86_CMPXCHG_H
3e9826380SJeremy Fitzhardinge #define ASM_X86_CMPXCHG_H
4e9826380SJeremy Fitzhardinge 
561e2cd0aSJeremy Fitzhardinge #include <linux/compiler.h>
6cd4d09ecSBorislav Petkov #include <asm/cpufeatures.h>
7e9826380SJeremy Fitzhardinge #include <asm/alternative.h> /* Provides LOCK_PREFIX */
8e9826380SJeremy Fitzhardinge 
961e2cd0aSJeremy Fitzhardinge /*
10a97673a1SIngo Molnar  * Non-existent functions to indicate usage errors at link time
1161e2cd0aSJeremy Fitzhardinge  * (or compile-time if the compiler implements __compiletime_error()).
1261e2cd0aSJeremy Fitzhardinge  */
/* Deliberately never defined anywhere: referencing one of these from an
 * unreachable switch default below is what produces the error report. */
1361e2cd0aSJeremy Fitzhardinge extern void __xchg_wrong_size(void)
1461e2cd0aSJeremy Fitzhardinge 	__compiletime_error("Bad argument size for xchg");
1561e2cd0aSJeremy Fitzhardinge extern void __cmpxchg_wrong_size(void)
1661e2cd0aSJeremy Fitzhardinge 	__compiletime_error("Bad argument size for cmpxchg");
1761e2cd0aSJeremy Fitzhardinge extern void __xadd_wrong_size(void)
1861e2cd0aSJeremy Fitzhardinge 	__compiletime_error("Bad argument size for xadd");
193d94ae0cSJeremy Fitzhardinge extern void __add_wrong_size(void)
203d94ae0cSJeremy Fitzhardinge 	__compiletime_error("Bad argument size for add");
21e9826380SJeremy Fitzhardinge 
22e9826380SJeremy Fitzhardinge /*
23e9826380SJeremy Fitzhardinge  * Constants for operation sizes. On 32-bit, the 64-bit size is set to
24e9826380SJeremy Fitzhardinge  * -1 because sizeof will never return -1, thereby making those switch
25d9f6e12fSIngo Molnar  * case statements guaranteed dead code which the compiler will
26e9826380SJeremy Fitzhardinge  * eliminate, and allowing the "missing symbol in the default case" to
27e9826380SJeremy Fitzhardinge  * indicate a usage error.
28e9826380SJeremy Fitzhardinge  */
29e9826380SJeremy Fitzhardinge #define __X86_CASE_B	1	/* u8 */
30e9826380SJeremy Fitzhardinge #define __X86_CASE_W	2	/* u16 */
31e9826380SJeremy Fitzhardinge #define __X86_CASE_L	4	/* u32 */
32e9826380SJeremy Fitzhardinge #ifdef CONFIG_64BIT
33e9826380SJeremy Fitzhardinge #define __X86_CASE_Q	8	/* u64 */
34e9826380SJeremy Fitzhardinge #else
35e9826380SJeremy Fitzhardinge #define	__X86_CASE_Q	-1		/* sizeof will never return -1 */
36e9826380SJeremy Fitzhardinge #endif
37e9826380SJeremy Fitzhardinge 
38e9826380SJeremy Fitzhardinge /*
3931a8394eSJeremy Fitzhardinge  * An exchange-type operation, which takes a value and a pointer, and
407f5281aeSLi Zhong  * returns the old value.
4131a8394eSJeremy Fitzhardinge  */
/*
 * __xchg_op() - size-dispatched exchange-style read-modify-write.
 * @ptr:  memory location; sizeof(*(ptr)) selects the b/w/l/q instruction
 * @arg:  value combined with *ptr; evaluates to the old *ptr value
 * @op:   instruction mnemonic stem (xchg or xadd)
 * @lock: literal lock-prefix string ("" or LOCK_PREFIX)
 *
 * The byte case uses the "q" constraint because only a subset of the
 * 32-bit integer registers has byte subregisters.  An unsupported size
 * falls into the default case and expands to __<op>_wrong_size(), which
 * is never defined, turning the usage error into a build failure.
 */
4231a8394eSJeremy Fitzhardinge #define __xchg_op(ptr, arg, op, lock)					\
4331a8394eSJeremy Fitzhardinge 	({								\
4431a8394eSJeremy Fitzhardinge 	        __typeof__ (*(ptr)) __ret = (arg);			\
4531a8394eSJeremy Fitzhardinge 		switch (sizeof(*(ptr))) {				\
4631a8394eSJeremy Fitzhardinge 		case __X86_CASE_B:					\
4731a8394eSJeremy Fitzhardinge 			asm volatile (lock #op "b %b0, %1\n"		\
482ca052a3SJeremy Fitzhardinge 				      : "+q" (__ret), "+m" (*(ptr))	\
4931a8394eSJeremy Fitzhardinge 				      : : "memory", "cc");		\
5031a8394eSJeremy Fitzhardinge 			break;						\
5131a8394eSJeremy Fitzhardinge 		case __X86_CASE_W:					\
5231a8394eSJeremy Fitzhardinge 			asm volatile (lock #op "w %w0, %1\n"		\
5331a8394eSJeremy Fitzhardinge 				      : "+r" (__ret), "+m" (*(ptr))	\
5431a8394eSJeremy Fitzhardinge 				      : : "memory", "cc");		\
5531a8394eSJeremy Fitzhardinge 			break;						\
5631a8394eSJeremy Fitzhardinge 		case __X86_CASE_L:					\
5731a8394eSJeremy Fitzhardinge 			asm volatile (lock #op "l %0, %1\n"		\
5831a8394eSJeremy Fitzhardinge 				      : "+r" (__ret), "+m" (*(ptr))	\
5931a8394eSJeremy Fitzhardinge 				      : : "memory", "cc");		\
6031a8394eSJeremy Fitzhardinge 			break;						\
6131a8394eSJeremy Fitzhardinge 		case __X86_CASE_Q:					\
6231a8394eSJeremy Fitzhardinge 			asm volatile (lock #op "q %q0, %1\n"		\
6331a8394eSJeremy Fitzhardinge 				      : "+r" (__ret), "+m" (*(ptr))	\
6431a8394eSJeremy Fitzhardinge 				      : : "memory", "cc");		\
6531a8394eSJeremy Fitzhardinge 			break;						\
6631a8394eSJeremy Fitzhardinge 		default:						\
6731a8394eSJeremy Fitzhardinge 			__ ## op ## _wrong_size();			\
6831a8394eSJeremy Fitzhardinge 		}							\
6931a8394eSJeremy Fitzhardinge 		__ret;							\
7031a8394eSJeremy Fitzhardinge 	})
7131a8394eSJeremy Fitzhardinge 
7231a8394eSJeremy Fitzhardinge /*
73e9826380SJeremy Fitzhardinge  * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
74e9826380SJeremy Fitzhardinge  * Since this is generally used to protect other memory information, we
75e9826380SJeremy Fitzhardinge  * use "asm volatile" and "memory" clobbers to prevent gcc from moving
76e9826380SJeremy Fitzhardinge  * information around.
77e9826380SJeremy Fitzhardinge  */
/* Empty lock-prefix string: xchg with a memory operand is implicitly locked. */
78f9881cc4SMark Rutland #define arch_xchg(ptr, v)	__xchg_op((ptr), (v), xchg, "")
79e9826380SJeremy Fitzhardinge 
80e9826380SJeremy Fitzhardinge /*
81e9826380SJeremy Fitzhardinge  * Atomic compare and exchange.  Compare OLD with MEM, if identical,
82e9826380SJeremy Fitzhardinge  * store NEW in MEM.  Return the initial value in MEM.  Success is
83e9826380SJeremy Fitzhardinge  * indicated by comparing RETURN with OLD.
84e9826380SJeremy Fitzhardinge  */
/*
 * __raw_cmpxchg() - size-dispatched compare-and-exchange.
 * @ptr:  memory location
 * @old:  expected value
 * @new:  value stored iff *ptr == @old
 * @size: sizeof(*(ptr)); selects the cmpxchg{b,w,l,q} variant
 * @lock: literal lock-prefix string ("", "lock; ", or LOCK_PREFIX)
 *
 * Evaluates to the value observed in *ptr; the exchange happened iff
 * that equals @old.  The "0" input constraint ties __old to the "=a"
 * output, i.e. the accumulator register CMPXCHG implicitly compares
 * against and writes the old value to.
 */
85e9826380SJeremy Fitzhardinge #define __raw_cmpxchg(ptr, old, new, size, lock)			\
86e9826380SJeremy Fitzhardinge ({									\
87e9826380SJeremy Fitzhardinge 	__typeof__(*(ptr)) __ret;					\
88e9826380SJeremy Fitzhardinge 	__typeof__(*(ptr)) __old = (old);				\
89e9826380SJeremy Fitzhardinge 	__typeof__(*(ptr)) __new = (new);				\
90e9826380SJeremy Fitzhardinge 	switch (size) {							\
91e9826380SJeremy Fitzhardinge 	case __X86_CASE_B:						\
92e9826380SJeremy Fitzhardinge 	{								\
93e9826380SJeremy Fitzhardinge 		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
94e9826380SJeremy Fitzhardinge 		asm volatile(lock "cmpxchgb %2,%1"			\
95e9826380SJeremy Fitzhardinge 			     : "=a" (__ret), "+m" (*__ptr)		\
96e9826380SJeremy Fitzhardinge 			     : "q" (__new), "0" (__old)			\
97e9826380SJeremy Fitzhardinge 			     : "memory");				\
98e9826380SJeremy Fitzhardinge 		break;							\
99e9826380SJeremy Fitzhardinge 	}								\
100e9826380SJeremy Fitzhardinge 	case __X86_CASE_W:						\
101e9826380SJeremy Fitzhardinge 	{								\
102e9826380SJeremy Fitzhardinge 		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
103e9826380SJeremy Fitzhardinge 		asm volatile(lock "cmpxchgw %2,%1"			\
104e9826380SJeremy Fitzhardinge 			     : "=a" (__ret), "+m" (*__ptr)		\
105e9826380SJeremy Fitzhardinge 			     : "r" (__new), "0" (__old)			\
106e9826380SJeremy Fitzhardinge 			     : "memory");				\
107e9826380SJeremy Fitzhardinge 		break;							\
108e9826380SJeremy Fitzhardinge 	}								\
109e9826380SJeremy Fitzhardinge 	case __X86_CASE_L:						\
110e9826380SJeremy Fitzhardinge 	{								\
111e9826380SJeremy Fitzhardinge 		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
112e9826380SJeremy Fitzhardinge 		asm volatile(lock "cmpxchgl %2,%1"			\
113e9826380SJeremy Fitzhardinge 			     : "=a" (__ret), "+m" (*__ptr)		\
114e9826380SJeremy Fitzhardinge 			     : "r" (__new), "0" (__old)			\
115e9826380SJeremy Fitzhardinge 			     : "memory");				\
116e9826380SJeremy Fitzhardinge 		break;							\
117e9826380SJeremy Fitzhardinge 	}								\
118e9826380SJeremy Fitzhardinge 	case __X86_CASE_Q:						\
119e9826380SJeremy Fitzhardinge 	{								\
120e9826380SJeremy Fitzhardinge 		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
121e9826380SJeremy Fitzhardinge 		asm volatile(lock "cmpxchgq %2,%1"			\
122e9826380SJeremy Fitzhardinge 			     : "=a" (__ret), "+m" (*__ptr)		\
123e9826380SJeremy Fitzhardinge 			     : "r" (__new), "0" (__old)			\
124e9826380SJeremy Fitzhardinge 			     : "memory");				\
125e9826380SJeremy Fitzhardinge 		break;							\
126e9826380SJeremy Fitzhardinge 	}								\
127e9826380SJeremy Fitzhardinge 	default:							\
128e9826380SJeremy Fitzhardinge 		__cmpxchg_wrong_size();					\
129e9826380SJeremy Fitzhardinge 	}								\
130e9826380SJeremy Fitzhardinge 	__ret;								\
131e9826380SJeremy Fitzhardinge })
132e9826380SJeremy Fitzhardinge 
/* SMP-safe form: LOCK_PREFIX comes from <asm/alternative.h> (see top). */
133e9826380SJeremy Fitzhardinge #define __cmpxchg(ptr, old, new, size)					\
134e9826380SJeremy Fitzhardinge 	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)
135e9826380SJeremy Fitzhardinge 
/* Always emits a literal "lock;" prefix, regardless of LOCK_PREFIX. */
136e9826380SJeremy Fitzhardinge #define __sync_cmpxchg(ptr, old, new, size)				\
137e9826380SJeremy Fitzhardinge 	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")
138e9826380SJeremy Fitzhardinge 
/* No lock prefix at all -- provides no cross-CPU atomicity guarantee. */
139e9826380SJeremy Fitzhardinge #define __cmpxchg_local(ptr, old, new, size)				\
140e9826380SJeremy Fitzhardinge 	__raw_cmpxchg((ptr), (old), (new), (size), "")
141e9826380SJeremy Fitzhardinge 
142bb898558SAl Viro #ifdef CONFIG_X86_32
143a1ce3928SDavid Howells # include <asm/cmpxchg_32.h>
144bb898558SAl Viro #else
145a1ce3928SDavid Howells # include <asm/cmpxchg_64.h>
146bb898558SAl Viro #endif
147e9826380SJeremy Fitzhardinge 
/*
 * arch_* entry points: derive the size argument from the pointee type,
 * so callers pass only (ptr, old, new).
 */
1488bf705d1SDmitry Vyukov #define arch_cmpxchg(ptr, old, new)					\
149fc395b92SJan Beulich 	__cmpxchg(ptr, old, new, sizeof(*(ptr)))
150e9826380SJeremy Fitzhardinge 
1518bf705d1SDmitry Vyukov #define arch_sync_cmpxchg(ptr, old, new)				\
152fc395b92SJan Beulich 	__sync_cmpxchg(ptr, old, new, sizeof(*(ptr)))
153e9826380SJeremy Fitzhardinge 
1548bf705d1SDmitry Vyukov #define arch_cmpxchg_local(ptr, old, new)				\
155fc395b92SJan Beulich 	__cmpxchg_local(ptr, old, new, sizeof(*(ptr)))
156e9826380SJeremy Fitzhardinge 
157a9ebf306SPeter Zijlstra 
/*
 * __raw_try_cmpxchg() - cmpxchg that reports success as a boolean.
 * @_ptr:  memory location
 * @_pold: pointer to the expected value; on failure it is updated with
 *         the value actually observed in *_ptr
 * @_new:  value stored on success
 * @size:  sizeof(*(_ptr)); selects the cmpxchg{b,w,l,q} variant
 * @lock:  literal lock-prefix string
 *
 * Evaluates to true iff the exchange happened.  Success is read
 * directly from ZF via CC_SET()/CC_OUT() rather than by re-comparing
 * the returned old value, saving the comparison in the caller.
 */
158a9ebf306SPeter Zijlstra #define __raw_try_cmpxchg(_ptr, _pold, _new, size, lock)		\
159a9ebf306SPeter Zijlstra ({									\
160a9ebf306SPeter Zijlstra 	bool success;							\
161007d185bSDmitry Vyukov 	__typeof__(_ptr) _old = (__typeof__(_ptr))(_pold);		\
162a9ebf306SPeter Zijlstra 	__typeof__(*(_ptr)) __old = *_old;				\
163a9ebf306SPeter Zijlstra 	__typeof__(*(_ptr)) __new = (_new);				\
164a9ebf306SPeter Zijlstra 	switch (size) {							\
165a9ebf306SPeter Zijlstra 	case __X86_CASE_B:						\
166a9ebf306SPeter Zijlstra 	{								\
167a9ebf306SPeter Zijlstra 		volatile u8 *__ptr = (volatile u8 *)(_ptr);		\
168a9ebf306SPeter Zijlstra 		asm volatile(lock "cmpxchgb %[new], %[ptr]"		\
169a9ebf306SPeter Zijlstra 			     CC_SET(z)					\
170a9ebf306SPeter Zijlstra 			     : CC_OUT(z) (success),			\
171a9ebf306SPeter Zijlstra 			       [ptr] "+m" (*__ptr),			\
172a9ebf306SPeter Zijlstra 			       [old] "+a" (__old)			\
173a9ebf306SPeter Zijlstra 			     : [new] "q" (__new)			\
174a9ebf306SPeter Zijlstra 			     : "memory");				\
175a9ebf306SPeter Zijlstra 		break;							\
176a9ebf306SPeter Zijlstra 	}								\
177a9ebf306SPeter Zijlstra 	case __X86_CASE_W:						\
178a9ebf306SPeter Zijlstra 	{								\
179a9ebf306SPeter Zijlstra 		volatile u16 *__ptr = (volatile u16 *)(_ptr);		\
180a9ebf306SPeter Zijlstra 		asm volatile(lock "cmpxchgw %[new], %[ptr]"		\
181a9ebf306SPeter Zijlstra 			     CC_SET(z)					\
182a9ebf306SPeter Zijlstra 			     : CC_OUT(z) (success),			\
183a9ebf306SPeter Zijlstra 			       [ptr] "+m" (*__ptr),			\
184a9ebf306SPeter Zijlstra 			       [old] "+a" (__old)			\
185a9ebf306SPeter Zijlstra 			     : [new] "r" (__new)			\
186a9ebf306SPeter Zijlstra 			     : "memory");				\
187a9ebf306SPeter Zijlstra 		break;							\
188a9ebf306SPeter Zijlstra 	}								\
189a9ebf306SPeter Zijlstra 	case __X86_CASE_L:						\
190a9ebf306SPeter Zijlstra 	{								\
191a9ebf306SPeter Zijlstra 		volatile u32 *__ptr = (volatile u32 *)(_ptr);		\
192a9ebf306SPeter Zijlstra 		asm volatile(lock "cmpxchgl %[new], %[ptr]"		\
193a9ebf306SPeter Zijlstra 			     CC_SET(z)					\
194a9ebf306SPeter Zijlstra 			     : CC_OUT(z) (success),			\
195a9ebf306SPeter Zijlstra 			       [ptr] "+m" (*__ptr),			\
196a9ebf306SPeter Zijlstra 			       [old] "+a" (__old)			\
197a9ebf306SPeter Zijlstra 			     : [new] "r" (__new)			\
198a9ebf306SPeter Zijlstra 			     : "memory");				\
199a9ebf306SPeter Zijlstra 		break;							\
200a9ebf306SPeter Zijlstra 	}								\
201a9ebf306SPeter Zijlstra 	case __X86_CASE_Q:						\
202a9ebf306SPeter Zijlstra 	{								\
203a9ebf306SPeter Zijlstra 		volatile u64 *__ptr = (volatile u64 *)(_ptr);		\
204a9ebf306SPeter Zijlstra 		asm volatile(lock "cmpxchgq %[new], %[ptr]"		\
205a9ebf306SPeter Zijlstra 			     CC_SET(z)					\
206a9ebf306SPeter Zijlstra 			     : CC_OUT(z) (success),			\
207a9ebf306SPeter Zijlstra 			       [ptr] "+m" (*__ptr),			\
208a9ebf306SPeter Zijlstra 			       [old] "+a" (__old)			\
209a9ebf306SPeter Zijlstra 			     : [new] "r" (__new)			\
210a9ebf306SPeter Zijlstra 			     : "memory");				\
211a9ebf306SPeter Zijlstra 		break;							\
212a9ebf306SPeter Zijlstra 	}								\
213a9ebf306SPeter Zijlstra 	default:							\
214a9ebf306SPeter Zijlstra 		__cmpxchg_wrong_size();					\
215a9ebf306SPeter Zijlstra 	}								\
21644fe8445SPeter Zijlstra 	if (unlikely(!success))						\
217a9ebf306SPeter Zijlstra 		*_old = __old;						\
21844fe8445SPeter Zijlstra 	likely(success);						\
219a9ebf306SPeter Zijlstra })
220a9ebf306SPeter Zijlstra 
/* Locked form: LOCK_PREFIX in front of cmpxchg. */
221a9ebf306SPeter Zijlstra #define __try_cmpxchg(ptr, pold, new, size)				\
222a9ebf306SPeter Zijlstra 	__raw_try_cmpxchg((ptr), (pold), (new), (size), LOCK_PREFIX)
223a9ebf306SPeter Zijlstra 
/* Unlocked form: no lock prefix is emitted. */
224*5cd4c268SUros Bizjak #define __try_cmpxchg_local(ptr, pold, new, size)			\
225*5cd4c268SUros Bizjak 	__raw_try_cmpxchg((ptr), (pold), (new), (size), "")
226*5cd4c268SUros Bizjak 
/* arch_ entry points: size derived from the pointee type. */
22729f006fdSPeter Zijlstra #define arch_try_cmpxchg(ptr, pold, new) 				\
228a9ebf306SPeter Zijlstra 	__try_cmpxchg((ptr), (pold), (new), sizeof(*(ptr)))
229a9ebf306SPeter Zijlstra 
230*5cd4c268SUros Bizjak #define arch_try_cmpxchg_local(ptr, pold, new)				\
231*5cd4c268SUros Bizjak 	__try_cmpxchg_local((ptr), (pold), (new), sizeof(*(ptr)))
232*5cd4c268SUros Bizjak 
233433b3520SJeremy Fitzhardinge /*
234433b3520SJeremy Fitzhardinge  * xadd() adds "inc" to "*ptr" and atomically returns the previous
235433b3520SJeremy Fitzhardinge  * value of "*ptr".
236433b3520SJeremy Fitzhardinge  *
237433b3520SJeremy Fitzhardinge  * xadd() is locked when multiple CPUs are online
238433b3520SJeremy Fitzhardinge  */
/* Size dispatch and the old-value return are shared with xchg via __xchg_op(). */
23931a8394eSJeremy Fitzhardinge #define __xadd(ptr, inc, lock)	__xchg_op((ptr), (inc), xadd, lock)
240433b3520SJeremy Fitzhardinge #define xadd(ptr, inc)		__xadd((ptr), (inc), LOCK_PREFIX)
2413d94ae0cSJeremy Fitzhardinge 
242e9826380SJeremy Fitzhardinge #endif	/* ASM_X86_CMPXCHG_H */
243