/* arch/x86/include/asm/cmpxchg_64.h */
#ifndef ASM_X86__CMPXCHG_64_H
#define ASM_X86__CMPXCHG_64_H

#include <asm/alternative.h> /* Provides LOCK_PREFIX */

#define xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), \
						 (ptr), sizeof(*(ptr))))

#define __xg(x) ((volatile long *)(x))

/*
 * On x86-64 an aligned 64-bit store is atomic, so a plain assignment
 * suffices here; the 32-bit variant of this helper has to do real
 * work (e.g. a cmpxchg8b loop).
 */
static inline void set_64bit(volatile unsigned long *ptr, unsigned long val)
{
	*ptr = val;
}

#define _set_64bit set_64bit

/*
 * Note: no "lock" prefix even on SMP: xchg always implies a locked
 * bus cycle anyway.
 * Note 2: xchg has a side effect, so the "volatile" qualifier is
 * necessary; strictly speaking the constraints are still incomplete,
 * since *ptr is really an output argument as well. --ANK
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	switch (size) {
	case 1:
		asm volatile("xchgb %b0,%1"
			     : "=q" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 2:
		asm volatile("xchgw %w0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 4:
		asm volatile("xchgl %k0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	case 8:
		asm volatile("xchgq %0,%1"
			     : "=r" (x)
			     : "m" (*__xg(ptr)), "0" (x)
			     : "memory");
		break;
	}
	return x;
}
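
/*
 * Usage sketch (illustrative only, not part of this header): xchg()
 * atomically swaps in a new value and returns the old one, which is a
 * common way to test-and-set a flag or hand off ownership of a
 * pointer. The function and parameter names below are hypothetical.
 */
#if 0
static int example_test_and_set(unsigned long *flag)
{
	/* Returns the previous value: 0 means we acquired the flag. */
	return xchg(flag, 1UL) != 0;
}
#endif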

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#define __HAVE_ARCH_CMPXCHG 1

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		asm volatile(LOCK_PREFIX "cmpxchgb %b1,%2"
			     : "=a"(prev)
			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile(LOCK_PREFIX "cmpxchgw %w1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile(LOCK_PREFIX "cmpxchgl %k1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile(LOCK_PREFIX "cmpxchgq %1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}

/*
 * Always use locked operations when touching memory shared with a
 * hypervisor, since the system may be SMP even if the guest kernel
 * isn't.
 */
static inline unsigned long __sync_cmpxchg(volatile void *ptr,
					   unsigned long old,
					   unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		asm volatile("lock; cmpxchgb %b1,%2"
			     : "=a"(prev)
			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile("lock; cmpxchgw %w1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile("lock; cmpxchgl %k1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}

static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	unsigned long prev;
	switch (size) {
	case 1:
		asm volatile("cmpxchgb %b1,%2"
			     : "=a"(prev)
			     : "q"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 2:
		asm volatile("cmpxchgw %w1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 4:
		asm volatile("cmpxchgl %k1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	case 8:
		asm volatile("cmpxchgq %1,%2"
			     : "=a"(prev)
			     : "r"(new), "m"(*__xg(ptr)), "0"(old)
			     : "memory");
		return prev;
	}
	return old;
}
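
/*
 * Usage sketch (illustrative only): the _local variant omits the lock
 * prefix, so it is only safe for data no other CPU can touch
 * concurrently, e.g. per-CPU state updated with preemption disabled.
 * The function and parameter names below are hypothetical.
 */
#if 0
static unsigned long example_update_percpu(unsigned long *pcpu_val,
					   unsigned long old,
					   unsigned long new)
{
	/* Caller must rule out cross-CPU access (e.g. preempt disabled). */
	return __cmpxchg_local(pcpu_val, old, new, sizeof(*pcpu_val));
}
#endif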

#define cmpxchg(ptr, o, n)						\
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),	\
				       (unsigned long)(n), sizeof(*(ptr))))
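
/*
 * Usage sketch (illustrative only): the canonical compare-and-swap
 * retry loop built on cmpxchg(). The hypothetical function below
 * implements a lock-free saturating increment; it is not a kernel API.
 */
#if 0
static unsigned long example_inc_saturating(unsigned long *ctr,
					    unsigned long max)
{
	unsigned long old, new;

	do {
		old = *ctr;
		if (old == max)
			return old;	/* already saturated, no store */
		new = old + 1;
		/*
		 * cmpxchg() returns the value it found; retry if another
		 * CPU changed *ctr between our load and the cmpxchg.
		 */
	} while (cmpxchg(ctr, old, new) != old);

	return new;
}
#endif
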
#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})
#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
					     (unsigned long)(n),	\
					     sizeof(*(ptr))))
#define sync_cmpxchg(ptr, o, n)						\
	((__typeof__(*(ptr)))__sync_cmpxchg((ptr), (unsigned long)(o),	\
					    (unsigned long)(n),		\
					    sizeof(*(ptr))))
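
/*
 * Usage sketch (illustrative only): sync_cmpxchg() is for fields a
 * hypervisor may modify concurrently, where the unconditionally locked
 * variant is required even on a UP guest. Note that __sync_cmpxchg
 * above provides no 8-byte case. The shared-status field and claim
 * semantics below are hypothetical.
 */
#if 0
static int example_claim_shared_slot(unsigned short *shared_status)
{
	/* Atomically move the slot from FREE (0) to CLAIMED (1). */
	return sync_cmpxchg(shared_status, 0, 1) == 0;
}
#endif
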
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
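
/*
 * Usage sketch (illustrative only): cmpxchg64() is cmpxchg() plus a
 * compile-time check that the operand really is 8 bytes wide, which
 * guards callers that must also build on 32-bit. The function and
 * variable names below are hypothetical; u64 assumes <linux/types.h>.
 */
#if 0
static u64 example_publish_seq(u64 *seq, u64 old_seq, u64 new_seq)
{
	/* BUILD_BUG_ON fires at compile time if *seq is not 64 bits. */
	return cmpxchg64(seq, old_seq, new_seq);
}
#endif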

#endif /* ASM_X86__CMPXCHG_64_H */