xref: /openbmc/linux/arch/mips/include/asm/cmpxchg.h (revision 31af04cd)
1 /*
2  * This file is subject to the terms and conditions of the GNU General Public
3  * License.  See the file "COPYING" in the main directory of this archive
4  * for more details.
5  *
6  * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
7  */
8 #ifndef __ASM_CMPXCHG_H
9 #define __ASM_CMPXCHG_H
10 
11 #include <linux/bug.h>
12 #include <linux/irqflags.h>
13 #include <asm/compiler.h>
14 #include <asm/war.h>
15 
/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 *
 * __scbeqz is the "branch if sc failed" mnemonic spliced into the ll/sc
 * loops below; it is #undef'd again at the end of this header.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif
26 
/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile-time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link-time due to the call to the missing function.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");
41 
/*
 * Atomically exchange the value at @m with @val, returning the old value.
 *
 * ld/st: load-linked/store-conditional mnemonics ("ll"/"sc" for 32-bit,
 *        "lld"/"scd" for 64-bit operands).
 * m:     pointer to the memory location being exchanged.
 * val:   new value to store.
 *
 * On CPUs with ll/sc the exchange is retried (__scbeqz branch back to 1:)
 * until the store-conditional succeeds.  No memory barriers are implied;
 * the xchg() wrapper below supplies them.
 */
#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: "memory");						\
	} else {							\
		/* No ll/sc: fall back to an IRQ-disabled read/write. */\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
73 
/* Out-of-line implementation of xchg for 1- and 2-byte operands. */
extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);
76 
77 static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
78 				   int size)
79 {
80 	switch (size) {
81 	case 1:
82 	case 2:
83 		return __xchg_small(ptr, x, size);
84 
85 	case 4:
86 		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);
87 
88 	case 8:
89 		if (!IS_ENABLED(CONFIG_64BIT))
90 			return __xchg_called_with_bad_pointer();
91 
92 		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);
93 
94 	default:
95 		return __xchg_called_with_bad_pointer();
96 	}
97 }
98 
/*
 * xchg - atomically exchange *ptr with x, returning the previous value.
 *
 * smp_mb__before_llsc()/smp_llsc_mb() are emitted on either side of the
 * exchange so it is ordered against surrounding memory accesses on SMP.
 */
#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})
112 
/*
 * Atomically compare-and-exchange the value at @m: if *m equals @old,
 * store @new; either way, return the value *m held beforehand (the store
 * happened iff that value equals @old).
 *
 * ld/st: load-linked/store-conditional mnemonics ("ll"/"sc" for 32-bit,
 *        "lld"/"scd" for 64-bit operands).
 *
 * A mismatch branches straight to 2: without storing; a failed sc retries
 * from 1: (__scbeqz).  No memory barriers are implied; the cmpxchg()
 * wrapper below supplies them.
 */
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm \n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)		\
		: "memory");						\
	} else {							\
		/* No ll/sc: fall back to an IRQ-disabled compare/store. */\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})
147 
/* Out-of-line implementation of cmpxchg for 1- and 2-byte operands. */
extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);
150 
151 static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
152 				      unsigned long new, unsigned int size)
153 {
154 	switch (size) {
155 	case 1:
156 	case 2:
157 		return __cmpxchg_small(ptr, old, new, size);
158 
159 	case 4:
160 		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
161 				     (u32)old, new);
162 
163 	case 8:
164 		/* lld/scd are only available for MIPS64 */
165 		if (!IS_ENABLED(CONFIG_64BIT))
166 			return __cmpxchg_called_with_bad_pointer();
167 
168 		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
169 				     (u64)old, new);
170 
171 	default:
172 		return __cmpxchg_called_with_bad_pointer();
173 	}
174 }
175 
/*
 * cmpxchg_local - cmpxchg with no SMP memory barriers (contrast cmpxchg()
 * below, which wraps this with barriers).  old/new are first cast through
 * the pointed-to type so they are truncated/converted to the operand's
 * width before being widened to unsigned long.
 */
#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))
182 
/*
 * cmpxchg - atomically replace *ptr with new if *ptr equals old, returning
 * the value *ptr held beforehand.  smp_mb__before_llsc()/smp_llsc_mb()
 * order the operation against surrounding memory accesses on SMP.
 */
#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})
193 
/*
 * 64-bit variants.  On 64-bit kernels these are plain cmpxchg() /
 * cmpxchg_local() plus a compile-time check that the operand really is
 * 8 bytes wide.  32-bit kernels have no lld/scd, so cmpxchg64_local()
 * uses the generic fallback from <asm-generic/cmpxchg-local.h>, and
 * cmpxchg64() is only provided (as the local variant) when !CONFIG_SMP.
 */
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
  })

#define cmpxchg64(ptr, o, n)						\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
  })
#else
#include <asm-generic/cmpxchg-local.h>
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#ifndef CONFIG_SMP
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
#endif
#endif
213 
214 #undef __scbeqz
215 
216 #endif /* __ASM_CMPXCHG_H */
217