xref: /openbmc/linux/arch/mips/include/asm/atomic.h (revision e3d786a3)
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif
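
/*
 * Every LL/SC loop below follows the same retry pattern; a minimal sketch,
 * mirroring the asm emitted by the macros further down (illustrative only):
 *
 *	1:	ll	temp, mem	# load-linked the current value
 *		<op>	temp, ...	# compute the new value
 *		sc	temp, mem	# store-conditional; temp = 1 on success
 *	__scbeqz	temp, 1b	# retry if another CPU intervened
 */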

#define ATOMIC_INIT(i)	  { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		READ_ONCE((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	WRITE_ONCE((v)->counter, (i))

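/*
 * Usage sketch (hypothetical caller, not part of this header): plain reads
 * and writes of an atomic_t go through READ_ONCE()/WRITE_ONCE() above, so
 * they are single, untorn accesses but imply no ordering.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	void reset_events(void)
 *	{
 *		atomic_set(&nr_events, 0);
 *	}
 *
 *	int events_seen(void)
 *	{
 *		return atomic_read(&nr_events);
 *	}
 */
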
#define ATOMIC_OP(op, c_op, asm_op)					      \
static __inline__ void atomic_##op(int i, atomic_t * v)			      \
{									      \
	if (kernel_uses_llsc) {						      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	ll	%0, %1		# atomic_" #op "	\n"   \
		"	" #asm_op " %0, %2				\n"   \
		"	sc	%0, %1					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}

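/*
 * For illustration (a sketch, not verbatim compiler output):
 * ATOMIC_OP(add, +=, addu) below effectively defines
 *
 *	static __inline__ void atomic_add(int i, atomic_t *v);
 *
 * which retries an ll/addu/sc loop on v->counter, or falls back to
 * "v->counter += i" under raw_local_irq_save().  The fallback relies on
 * the CPU being uniprocessor, since disabling interrupts does not exclude
 * other CPUs.
 */
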
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ int atomic_##op##_return_relaxed(int i, atomic_t * v)	      \
{									      \
	int result;							      \
									      \
	if (kernel_uses_llsc) {						      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	ll	%1, %2		# atomic_" #op "_return	\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	sc	%0, %2					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}

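/*
 * Note on ATOMIC_OP_RETURN above: sc overwrites %0 (result) with its
 * success flag, so once the store succeeds the asm_op is applied a second
 * time to the old value still held in %1, recomputing the value that was
 * stored.  These functions therefore return the *new* value.  The fully
 * ordered atomic_*_return() wrappers are derived from these _relaxed
 * variants by generic code in <linux/atomic.h>.
 */
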
#define ATOMIC_FETCH_OP(op, c_op, asm_op)				      \
static __inline__ int atomic_fetch_##op##_relaxed(int i, atomic_t * v)	      \
{									      \
	int result;							      \
									      \
	if (kernel_uses_llsc) {						      \
		int temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	ll	%1, %2		# atomic_fetch_" #op "	\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	sc	%0, %2					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		"	move	%0, %1					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}

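/*
 * Contrast with ATOMIC_OP_RETURN: here the old value loaded by ll is
 * preserved in %1 and copied to the result with "move %0, %1" after the
 * sc succeeds, so atomic_fetch_*() returns the value the counter held
 * *before* the operation.
 */
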
#define ATOMIC_OPS(op, c_op, asm_op)					      \
	ATOMIC_OP(op, c_op, asm_op)					      \
	ATOMIC_OP_RETURN(op, c_op, asm_op)				      \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, addu)
ATOMIC_OPS(sub, -=, subu)

#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed

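/*
 * A hypothetical caller of the generated functions (sketch only;
 * wake_consumer() is a made-up helper):
 *
 *	static atomic_t pending = ATOMIC_INIT(0);
 *
 *	void producer(void)
 *	{
 *		if (atomic_fetch_add_relaxed(1, &pending) == 0)
 *			wake_consumer();	// counter was idle before us
 *	}
 *
 * The self-referential #defines above advertise the _relaxed forms to
 * <linux/atomic.h>, which then derives the acquire, release and fully
 * ordered variants instead of substituting generic fallbacks.
 */
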
#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)					      \
	ATOMIC_OP(op, c_op, asm_op)					      \
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(or, |=, or)
ATOMIC_OPS(xor, ^=, xor)

#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

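/*
 * The bitwise ops are instantiated without ATOMIC_OP_RETURN on purpose:
 * the kernel-wide atomic API has no atomic_{and,or,xor}_return(), only
 * the void and fetch_ forms generated here.
 */
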
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	.set	mips0					\n"
		"	subu	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 1f					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	sc	%1, %2					\n"
		"\t" __scbeqz "	%1, 1b					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

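/*
 * Note the control flow above: if the subtraction would go negative, the
 * "bltz %0, 1f" skips the sc entirely, so nothing is stored and the
 * (negative) difference is returned for the caller to test.  The explicit
 * smp_mb__before_llsc()/smp_llsc_mb() pair makes this a fully ordered
 * operation, unlike the _relaxed ops above.
 */
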
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

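/*
 * Sketch of a typical cmpxchg retry loop built on atomic_cmpxchg()
 * (hypothetical helper, shown for illustration only): atomically add to a
 * counter unless it already exceeds a ceiling.
 *
 *	static int atomic_add_unless_above(atomic_t *v, int a, int limit)
 *	{
 *		int old, new;
 *
 *		do {
 *			old = atomic_read(v);
 *			if (old > limit)
 *				return 0;
 *			new = old + a;
 *		} while (atomic_cmpxchg(v, old, new) != old);
 *		return 1;
 *	}
 */
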

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	READ_ONCE((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	WRITE_ONCE((v)->counter, (i))

#define ATOMIC64_OP(op, c_op, asm_op)					      \
static __inline__ void atomic64_##op(long i, atomic64_t * v)		      \
{									      \
	if (kernel_uses_llsc) {						      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	lld	%0, %1		# atomic64_" #op "	\n"   \
		"	" #asm_op " %0, %2				\n"   \
		"	scd	%0, %1					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter)	      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
}

#define ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
static __inline__ long atomic64_##op##_return_relaxed(long i, atomic64_t * v) \
{									      \
	long result;							      \
									      \
	if (kernel_uses_llsc) {						      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	lld	%1, %2		# atomic64_" #op "_return\n"  \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	scd	%0, %2					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		result c_op i;						      \
		v->counter = result;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}

#define ATOMIC64_FETCH_OP(op, c_op, asm_op)				      \
static __inline__ long atomic64_fetch_##op##_relaxed(long i, atomic64_t * v)  \
{									      \
	long result;							      \
									      \
	if (kernel_uses_llsc) {						      \
		long temp;						      \
									      \
		__asm__ __volatile__(					      \
		"	.set	"MIPS_ISA_LEVEL"			\n"   \
		"1:	lld	%1, %2		# atomic64_fetch_" #op "\n"   \
		"	" #asm_op " %0, %1, %3				\n"   \
		"	scd	%0, %2					\n"   \
		"\t" __scbeqz "	%0, 1b					\n"   \
		"	move	%0, %1					\n"   \
		"	.set	mips0					\n"   \
		: "=&r" (result), "=&r" (temp),				      \
		  "+" GCC_OFF_SMALL_ASM() (v->counter)			      \
		: "Ir" (i));						      \
	} else {							      \
		unsigned long flags;					      \
									      \
		raw_local_irq_save(flags);				      \
		result = v->counter;					      \
		v->counter c_op i;					      \
		raw_local_irq_restore(flags);				      \
	}								      \
									      \
	return result;							      \
}

#define ATOMIC64_OPS(op, c_op, asm_op)					      \
	ATOMIC64_OP(op, c_op, asm_op)					      \
	ATOMIC64_OP_RETURN(op, c_op, asm_op)				      \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(add, +=, daddu)
ATOMIC64_OPS(sub, -=, dsubu)

#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed

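/*
 * The 64-bit instantiations above differ from the 32-bit ones only in the
 * opcodes: lld/scd are the doubleword forms of ll/sc, and daddu/dsubu are
 * the doubleword forms of addu/subu.
 */
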
#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, c_op, asm_op)					      \
	ATOMIC64_OP(op, c_op, asm_op)					      \
	ATOMIC64_FETCH_OP(op, c_op, asm_op)

ATOMIC64_OPS(and, &=, and)
ATOMIC64_OPS(or, |=, or)
ATOMIC64_OPS(xor, ^=, xor)

#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic
 *                            variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	move	%1, %0					\n"
		"	bltz	%0, 1f					\n"
		"	scd	%1, %2					\n"
		"\t" __scbeqz "	%1, 1b					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp),
		  "+" GCC_OFF_SMALL_ASM() (v->counter)
		: "Ir" (i));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

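/*
 * The cast on atomic64_cmpxchg() pins the returned old value to the
 * counter's own type (long), so callers can compare the result directly
 * against a long without an implicit-conversion surprise.
 */
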
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

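/*
 * Usage sketch (hypothetical, mirroring the 32-bit case): a 64-bit credit
 * counter that callers may drain but never below zero.
 *
 *	static atomic64_t credits = ATOMIC64_INIT(100);
 *
 *	bool take_credit(void)
 *	{
 *		return atomic64_dec_if_positive(&credits) >= 0;
 *	}
 */
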
#endif /* CONFIG_64BIT */

#endif /* _ASM_ATOMIC_H */