xref: /openbmc/linux/arch/x86/include/asm/cmpxchg_64.h (revision 5bef0035)
1b2441318SGreg Kroah-Hartman /* SPDX-License-Identifier: GPL-2.0 */
21965aae3SH. Peter Anvin #ifndef _ASM_X86_CMPXCHG_64_H
31965aae3SH. Peter Anvin #define _ASM_X86_CMPXCHG_64_H
4bb898558SAl Viro 
/*
 * 64-bit cmpxchg: on x86-64 a u64 is the native word size, so simply
 * delegate to arch_cmpxchg().  The BUILD_BUG_ON() rejects accidental
 * use on operands that are not exactly 8 bytes wide.
 */
#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})
10f3834b9eSPeter Zijlstra 
/*
 * CPU-local variant of arch_cmpxchg64(): delegates to
 * arch_cmpxchg_local() after the same 8-byte size check.
 */
#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
})
16bb898558SAl Viro 
/*
 * try_cmpxchg form for u64: @po points at the expected value and is
 * updated with the observed value on failure (semantics come from
 * arch_try_cmpxchg(), to which this delegates after the size check).
 */
#define arch_try_cmpxchg64(ptr, po, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_try_cmpxchg((ptr), (po), (n));				\
})
22c2df0a6aSUros Bizjak 
/*
 * View of a u128 as two u64 halves.  cmpxchg16b works on the
 * RDX:RAX / RCX:RBX register pairs, so the asm below binds .low/.high
 * to the a/d (old) and b/c (new) registers respectively.
 */
union __u128_halves {
	u128 full;
	struct {
		u64 low, high;
	};
};
29b23e139dSPeter Zijlstra 
/*
 * 128-bit compare-and-exchange via cmpxchg16b.
 *
 * @_lock is either LOCK_PREFIX (SMP-safe caller) or empty (CPU-local
 * caller).  The expected value goes in via "+a"/"+d" (RAX:RDX) and the
 * replacement via "b"/"c" (RBX:RCX); on a miss the instruction writes
 * the current memory contents back into RAX:RDX, so o.full ends up
 * holding the previous *_ptr value either way.
 *
 * Evaluates to the previous 128-bit value (equals @_old iff the
 * exchange took place).  The "memory" clobber orders this against
 * surrounding accesses.
 */
#define __arch_cmpxchg128(_ptr, _old, _new, _lock)			\
({									\
	union __u128_halves o = { .full = (_old), },			\
			    n = { .full = (_new), };			\
									\
	asm volatile(_lock "cmpxchg16b %[ptr]"				\
		     : [ptr] "+m" (*(_ptr)),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	o.full;								\
})
43b23e139dSPeter Zijlstra 
/* SMP-safe 128-bit cmpxchg (LOCK prefix); returns the previous value. */
static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new, LOCK_PREFIX);
}
/* Advertise the native implementation to the generic cmpxchg layer. */
#define arch_cmpxchg128 arch_cmpxchg128
49b23e139dSPeter Zijlstra 
/*
 * CPU-local 128-bit cmpxchg: the deliberately empty fourth argument
 * expands to no LOCK prefix.
 */
static __always_inline u128 arch_cmpxchg128_local(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new,);
}
/* Advertise the native implementation to the generic cmpxchg layer. */
#define arch_cmpxchg128_local arch_cmpxchg128_local
55b23e139dSPeter Zijlstra 
/*
 * try_cmpxchg flavour of the 128-bit compare-and-exchange.
 *
 * Same cmpxchg16b operand binding as __arch_cmpxchg128(), but success
 * is taken straight from ZF via CC_SET(e)/CC_OUT(e) instead of
 * comparing values.  On failure the observed memory value (returned by
 * the CPU in RAX:RDX, i.e. o) is written back through @_oldp so the
 * caller can retry without an extra load.
 *
 * Evaluates to true iff the exchange succeeded.
 */
#define __arch_try_cmpxchg128(_ptr, _oldp, _new, _lock)			\
({									\
	union __u128_halves o = { .full = *(_oldp), },			\
			    n = { .full = (_new), };			\
	bool ret;							\
									\
	asm volatile(_lock "cmpxchg16b %[ptr]"				\
		     CC_SET(e)						\
		     : CC_OUT(e) (ret),					\
		       [ptr] "+m" (*ptr),				\
		       "+a" (o.low), "+d" (o.high)			\
		     : "b" (n.low), "c" (n.high)			\
		     : "memory");					\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})
75b23e139dSPeter Zijlstra 
/*
 * SMP-safe 128-bit try_cmpxchg; on failure *oldp is updated with the
 * observed value.  Returns true on success.
 */
static __always_inline bool arch_try_cmpxchg128(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new, LOCK_PREFIX);
}
/* Advertise the native implementation to the generic cmpxchg layer. */
#define arch_try_cmpxchg128 arch_try_cmpxchg128
81b23e139dSPeter Zijlstra 
/*
 * CPU-local 128-bit try_cmpxchg: empty fourth argument means no LOCK
 * prefix.
 */
static __always_inline bool arch_try_cmpxchg128_local(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new,);
}
/* Advertise the native implementation to the generic cmpxchg layer. */
#define arch_try_cmpxchg128_local arch_try_cmpxchg128_local
87b23e139dSPeter Zijlstra 
/* cmpxchg16b availability is reported by the CPUID CX16 feature bit. */
#define system_has_cmpxchg128()		boot_cpu_has(X86_FEATURE_CX16)
893824abd1SChristoph Lameter 
901965aae3SH. Peter Anvin #endif /* _ASM_X86_CMPXCHG_64_H */
91