/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif

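/*
 * Illustrative sketch (not from the original source): the macros below all
 * emit a retry loop of this shape, shown here with hypothetical operands.
 * The __scbeqz branch re-runs the loop whenever the store-conditional
 * fails, and becomes the branch-likely beqzl on affected R10000 parts:
 *
 *	1:	ll	$t0, (addr)	# load-linked current value
 *		move	$t1, new	# value we want to store
 *		sc	$t1, (addr)	# conditional store; $t1 = success
 *		beqz	$t1, 1b		# sc failed, retry from the ll
 */
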
/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

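/*
 * Example (hypothetical): with a compiler that honours __compiletime_error,
 * an unsupported operand size fails the build instead of miscompiling, e.g.:
 *
 *	struct { char c[3]; } odd;
 *	xchg(&odd, x);	// error: "Bad argument size for xchg"
 */
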
#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: __LLSC_CLOBBER);					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

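/*
 * Note (not from the original source): when kernel_uses_llsc is false, the
 * fallback above emulates the exchange by disabling local interrupts around
 * a plain load and store. That is only atomic with respect to the current
 * CPU, which is sufficient on the non-LL/SC uniprocessor systems this path
 * is meant for.
 */
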
extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
				   int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})

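/*
 * Illustrative use (sketch, names hypothetical): swap in a new value and
 * act on the one that was there before:
 *
 *	u32 old = xchg(&state, NEW_STATE);
 *	if (old != NEW_STATE)
 *		...
 *
 * The smp_mb__before_llsc()/smp_llsc_mb() pair gives xchg() full memory
 * barrier semantics on SMP.
 */
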
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: __LLSC_CLOBBER);					\
		loongson_llsc_mb();					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}

#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})

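/*
 * Illustrative use (sketch, names hypothetical): the classic cmpxchg()
 * retry loop, bumping a shared u32 counter without a lock:
 *
 *	u32 old, new;
 *	do {
 *		old = READ_ONCE(counter);
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 *
 * cmpxchg() stores new only if *ptr still equals old, and always returns
 * the value it observed, so a mismatch means another CPU won the race.
 */
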
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else

# include <asm-generic/cmpxchg-local.h>
# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long __cmpxchg64(volatile void *ptr,
					unsigned long long old,
					unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

	/*
	 * The assembly below has to combine 32 bit values into a 64 bit
	 * register, and split 64 bit values from one register into two. If we
	 * were to take an interrupt in the middle of this we'd only save the
	 * least significant 32 bits of each register & probably clobber the
	 * most significant 32 bits of the 64 bit values we're using. In order
	 * to avoid this we must disable interrupts.
	 */
	local_irq_save(flags);

	loongson_llsc_mb();
	asm volatile(
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
	/* Load 64 bits from ptr */
	"1:	lld	%L0, %3		# __cmpxchg64	\n"
	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold the
	 * ret variable.
	 */
	"	dsra	%M0, %L0, 32			\n"
	"	sll	%L0, %L0, 0			\n"
	/*
	 * Compare ret against old, breaking out of the loop if they don't
	 * match.
	 */
	"	bne	%M0, %M4, 2f			\n"
	"	bne	%L0, %L4, 2f			\n"
	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
#  if MIPS_ISA_REV >= 2
	"	move	%L1, %L5			\n"
	"	dins	%L1, %M5, 32, 32		\n"
#  else
	"	dsll	%L1, %L5, 32			\n"
	"	dsrl	%L1, %L1, 32			\n"
	"	.set	noat				\n"
	"	dsll	$at, %M5, 32			\n"
	"	or	%L1, %L1, $at			\n"
	"	.set	at				\n"
#  endif
	/* Attempt to store new at ptr */
	"	scd	%L1, %2				\n"
	/* If we failed, loop! */
	"\t" __scbeqz "	%L1, 1b				\n"
	"	.set	pop				\n"
	"2:						\n"
	: "=&r"(ret),
	  "=&r"(tmp),
	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
	  "r" (old),
	  "r" (new)
	: "memory");
	loongson_llsc_mb();

	local_irq_restore(flags);
	return ret;
}

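/*
 * Note (not from the original source): on a 32-bit kernel a 64-bit asm
 * operand occupies a pair of registers, and GCC's %Ln/%Mn operand
 * modifiers select the least and most significant halves of that pair.
 * That is what lets the code above split the value loaded by lld and
 * recombine the two halves of new before the scd.
 */
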
#  define cmpxchg64(ptr, o, n) ({					\
	unsigned long long __old = (__typeof__(*(ptr)))(o);		\
	unsigned long long __new = (__typeof__(*(ptr)))(n);		\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * We can only use cmpxchg64 if we know that the CPU supports	\
	 * 64-bits, i.e. lld & scd. Our call to __cmpxchg64_unsupported	\
	 * will cause a build error unless cpu_has_64bits is a		\
	 * compile-time constant 1.					\
	 */								\
	if (cpu_has_64bits && kernel_uses_llsc) {			\
		smp_mb__before_llsc();					\
		__res = __cmpxchg64((ptr), __old, __new);		\
		smp_llsc_mb();						\
	} else {							\
		__res = __cmpxchg64_unsupported();			\
	}								\
									\
	__res;								\
})

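/*
 * Illustrative use (sketch, names hypothetical): cmpxchg64() follows the
 * same contract as cmpxchg() but on a 64-bit quantity:
 *
 *	u64 old = cmpxchg64(&seq, cur, cur + 1);
 *	if (old == cur)
 *		...	// update succeeded
 */
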
# else /* !CONFIG_SMP */
#  define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
# endif /* !CONFIG_SMP */
#endif /* !CONFIG_64BIT */

#undef __scbeqz

#endif /* __ASM_CMPXCHG_H */