xref: /openbmc/linux/arch/mips/include/asm/cmpxchg.h (revision b8d312aa)
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif
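
/*
 * For example, the sc result checks below ("\t" __scbeqz "\t$1, 1b")
 * assemble to "beqzl	$1, 1b" when the workaround is needed and to a
 * plain "beqz	$1, 1b" otherwise.
 */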

/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");
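
/*
 * For example (illustrative): xchg() on a u64 under a 32-bit kernel
 * reaches the size-8 case of __xchg() below with CONFIG_64BIT disabled,
 * leaving behind a call to __xchg_called_with_bad_pointer() that fails
 * the build with "Bad argument size for xchg".
 */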

#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
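
/*
 * The interrupt-disabling fallback above is atomic only with respect to
 * code running on the same CPU; it relies on kernel_uses_llsc being true
 * on any system where another CPU could touch *m concurrently.
 */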

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
				   int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})
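
/*
 * Example usage (illustrative only): atomically claim a flag while
 * observing its previous value; we_own_the_resource() is hypothetical.
 *
 *	static unsigned long busy;
 *	...
 *	if (xchg(&busy, 1) == 0)
 *		we_own_the_resource();
 */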

#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: "memory");						\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}

#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})
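
/*
 * Example usage (illustrative only): a lock-free increment of an
 * arbitrary u32 'counter' built on cmpxchg(), retrying whenever another
 * CPU changed the value between the read and the compare-and-exchange.
 *
 *	u32 old, new;
 *	do {
 *		old = READ_ONCE(counter);
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */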

#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else

# include <asm-generic/cmpxchg-local.h>
# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
					unsigned long long old,
					unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

	/*
	 * The assembly below has to combine 32 bit values into a 64 bit
	 * register, and split 64 bit values from one register into two. If we
	 * were to take an interrupt in the middle of this we'd only save the
	 * least significant 32 bits of each register & probably clobber the
	 * most significant 32 bits of the 64 bit values we're using. In order
	 * to avoid this we must disable interrupts.
	 */
	local_irq_save(flags);

	asm volatile(
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
	/* Load 64 bits from ptr */
	"1:	lld	%L0, %3		# __cmpxchg64	\n"
	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold the
	 * ret variable.
	 */
	"	dsra	%M0, %L0, 32			\n"
	"	sll	%L0, %L0, 0			\n"
	/*
	 * Compare ret against old, breaking out of the loop if they don't
	 * match.
	 */
	"	bne	%M0, %M4, 2f			\n"
	"	bne	%L0, %L4, 2f			\n"
	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
#  if MIPS_ISA_REV >= 2
	"	move	%L1, %L5			\n"
	"	dins	%L1, %M5, 32, 32		\n"
#  else
	"	dsll	%L1, %L5, 32			\n"
	"	dsrl	%L1, %L1, 32			\n"
	"	.set	noat				\n"
	"	dsll	$at, %M5, 32			\n"
	"	or	%L1, %L1, $at			\n"
	"	.set	at				\n"
#  endif
	/* Attempt to store new at ptr */
	"	scd	%L1, %2				\n"
	/* If we failed, loop! */
	"\t" __scbeqz "	%L1, 1b				\n"
	"	.set	pop				\n"
	"2:						\n"
	: "=&r"(ret),
	  "=&r"(tmp),
	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
	  "r" (old),
	  "r" (new)
	: "memory");

	local_irq_restore(flags);
	return ret;
}
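
/*
 * Note that the sequence above uses lld/scd & 64-bit shifts even though
 * the kernel is 32-bit; it is therefore only safe on CPUs implementing
 * the 64-bit ISA, which the cmpxchg64() wrapper below checks via
 * cpu_has_64bits.
 */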

#  define cmpxchg64(ptr, o, n) ({					\
	unsigned long long __old = (__typeof__(*(ptr)))(o);		\
	unsigned long long __new = (__typeof__(*(ptr)))(n);		\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * We can only use cmpxchg64 if we know that the CPU supports	\
	 * 64-bits, ie. lld & scd. Our call to __cmpxchg64_unsupported	\
	 * will cause a build error unless cpu_has_64bits is a		\
	 * compile-time constant 1.					\
	 */								\
	if (cpu_has_64bits && kernel_uses_llsc)				\
		__res = __cmpxchg64((ptr), __old, __new);		\
	else								\
		__res = __cmpxchg64_unsupported();			\
									\
	__res;								\
})

# else /* !CONFIG_SMP */
#  define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */
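
/*
 * Example usage (illustrative only): cmpxchg64() follows the same pattern
 * as cmpxchg(), operating on a 64-bit quantity regardless of the kernel's
 * bitness ('stamp' here is an arbitrary u64 variable).
 *
 *	u64 old, new;
 *	do {
 *		old = READ_ONCE(stamp);
 *		new = old + 1;
 *	} while (cmpxchg64(&stamp, old, new) != old);
 */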

#undef __scbeqz

#endif /* __ASM_CMPXCHG_H */