#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), \
						 (ptr), sizeof(*(ptr))))

#define __xg(x) ((volatile long *)(x))

/*
 * On x86-64 an aligned 64-bit store is already atomic, so a plain
 * store suffices (unlike the 32-bit version of this header, which
 * needs a cmpxchg8b sequence).
 */
static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit

/*
 * Note: no "lock" prefix even on SMP: xchg always implies a locked
 * access anyway.
 * Note 2: xchg has a side effect, so the volatile qualifier is
 * necessary.  Strictly speaking the constraints are incomplete, since
 * *ptr is also an output operand; the "memory" clobber covers
 * that. --ANK
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	switch (size) {
	case 1:
		asm volatile("xchgb %b0,%1"
			     : "=q" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 2:
		asm volatile("xchgw %w0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 4:
		asm volatile("xchgl %k0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 8:
		asm volatile("xchgq %0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	}
	return x;
}

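/*
 * Illustrative sketch, not part of the original header: a hypothetical
 * caller of the xchg() wrapper above.  Storing a nonzero value and
 * inspecting what was there before gives a simple test-and-set.
 */
static inline int example_test_and_set(volatile unsigned long *flag)
{
	/* Atomically store 1 and fetch the previous contents. */
	return xchg(flag, 1UL) != 0;
}
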
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG 1

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		asm volatile(LOCK_PREFIX "cmpxchgb %b1,%2"
			     : "=a"(prev)
			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile(LOCK_PREFIX "cmpxchgw %w1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile(LOCK_PREFIX "cmpxchgl %k1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile(LOCK_PREFIX "cmpxchgq %1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}

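/*
 * Illustrative sketch, not part of the original header: the canonical
 * compare-and-exchange retry loop.  As the comment above says, success
 * is detected by comparing the returned value with the expected one.
 * (The cmpxchg() macro is only defined further down, so this sketch
 * calls __cmpxchg() directly.)
 */
static inline unsigned long example_atomic_add(volatile unsigned long *ptr,
					       unsigned long inc)
{
	unsigned long old, prev;

	do {
		old = *ptr;
		/* Store old + inc only if *ptr still holds old. */
		prev = __cmpxchg(ptr, old, old + inc, sizeof(*ptr));
	} while (prev != old);		/* lost a race: retry */

	return old;
}
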
/*
 * Always use locked operations when touching memory shared with a
 * hypervisor, since the system may be SMP even if the guest kernel
 * isn't.  Note that only the 1-, 2- and 4-byte cases are provided
 * here.
 */
static inline unsigned long __sync_cmpxchg(volatile void *ptr,
					   unsigned long old,
					   unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		asm volatile("lock; cmpxchgb %b1,%2"
			     : "=a"(prev)
			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile("lock; cmpxchgw %w1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile("lock; cmpxchgl %k1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}

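/*
 * Illustrative sketch, not part of the original header: claiming a
 * byte-sized flag in memory shared with a hypothetical hypervisor
 * interface.  __sync_cmpxchg() hard-codes the lock prefix, so the
 * update stays atomic even in a kernel built without CONFIG_SMP.
 */
static inline int example_claim_shared_flag(volatile unsigned char *flag)
{
	/* Succeeds (returns 1) only if the flag went from 0 to 1 here. */
	return __sync_cmpxchg(flag, 0, 1, sizeof(*flag)) == 0;
}
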
static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		asm volatile("cmpxchgb %b1,%2"
			     : "=a"(prev)
			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile("cmpxchgw %w1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile("cmpxchgl %k1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile("cmpxchgq %1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}

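/*
 * Illustrative sketch, not part of the original header: a counter that
 * only its owning CPU ever touches.  __cmpxchg_local() omits the lock
 * prefix; cmpxchg is still a single instruction, so it cannot be torn
 * by an interrupt on the same CPU, but it is NOT safe against other
 * CPUs.
 */
static inline unsigned long example_local_inc(volatile unsigned long *cnt)
{
	unsigned long old, prev;

	do {
		old = *cnt;
		prev = __cmpxchg_local(cnt, old, old + 1, sizeof(*cnt));
	} while (prev != old);		/* an IRQ on this CPU intervened */

	return old;
}
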
#define cmpxchg(ptr, o, n)						\
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
				       (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
					     (unsigned long)(n),	\
					     sizeof(*(ptr))))
#define sync_cmpxchg(ptr, o, n)						\
	((__typeof__(*(ptr)))__sync_cmpxchg((ptr), (unsigned long)(o),	\
					    (unsigned long)(n),		\
					    sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})

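/*
 * Illustrative sketch, not part of the original header: the public
 * macros above cast their result back to the pointee type, so callers
 * compare values of the right type directly.  cmpxchg64() additionally
 * rejects non-64-bit objects at compile time via BUILD_BUG_ON().
 */
static inline int example_publish(volatile unsigned long *slot,
				  unsigned long expected, unsigned long val)
{
	/* Nonzero return means *slot held 'expected' and now holds 'val'. */
	return cmpxchg64(slot, expected, val) == expected;
}
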
#endif /* _ASM_X86_CMPXCHG_64_H */