#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }

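/*
 * atomic_read()/atomic_set() are plain loads and stores; the inline asm
 * only keeps the compiler from tearing or caching the access.  The %U/%X
 * operand modifiers let the compiler emit the update/indexed forms of
 * lwz/stw when the "m" operand needs them.  These accessors imply no
 * memory barriers.
 */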
static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

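/*
 * The ATOMIC_OP* templates below generate the arithmetic/logic ops as a
 * load-reserve/store-conditional retry loop:
 *
 *   1:  lwarx   t,0,&v->counter    # load and take reservation
 *       <op>    t,a,t
 *       stwcx.  t,0,&v->counter    # store iff reservation still held
 *       bne-    1b                 # reservation lost -> retry
 *
 * PPC405_ERR77() expands to a dcbt on 40x parts affected by erratum #77
 * and to nothing elsewhere.  The _return variants are bracketed by
 * PPC_ATOMIC_ENTRY_BARRIER/PPC_ATOMIC_EXIT_BARRIER so they provide the
 * full ordering the kernel's atomic API expects; the void variants have
 * no ordering semantics.
 */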
#define ATOMIC_OP(op, asm_op)						\
static __inline__ void atomic_##op(int a, atomic_t *v)			\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
"1:	lwarx	%0,0,%3		# atomic_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
	PPC405_ERR77(0,%3)						\
"	stwcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}									\

#define ATOMIC_OP_RETURN(op, asm_op)					\
static __inline__ int atomic_##op##_return(int a, atomic_t *v)		\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	lwarx	%0,0,%2		# atomic_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
	PPC405_ERR77(0,%2)						\
"	stwcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC_OPS(op, asm_op) ATOMIC_OP(op, asm_op) ATOMIC_OP_RETURN(op, asm_op)

ATOMIC_OPS(add, add)
ATOMIC_OPS(sub, subf)

ATOMIC_OP(and, and)
ATOMIC_OP(or, or)
ATOMIC_OP(xor, xor)

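/*
 * The expansions above provide atomic_add(), atomic_sub(), atomic_and(),
 * atomic_or() and atomic_xor(), plus atomic_add_return() and
 * atomic_sub_return() for the two ATOMIC_OPS() users.
 */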
#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

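/*
 * inc/dec are open-coded rather than generated from ATOMIC_OP() so they
 * can use addic with an immediate operand.  addic updates the carry bit,
 * which is why XER appears in the clobber lists below.
 */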
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
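/*
 * Example: atomic_inc_and_test() only reports true when the increment
 * takes the counter to exactly zero, e.g. when it was previously -1.
 */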

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)\
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
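/*
 * atomic_cmpxchg()/atomic_xchg() are implemented in terms of the generic
 * cmpxchg()/xchg() operations on the counter word (see asm/cmpxchg.h).
 */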

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
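/*
 * Sketch of typical use: the generic atomic_add_unless() wrapper compares
 * the value returned here against @u, e.g. atomic_add_unless(v, 1, 0)
 * adds 1 only while *v is non-zero and reports whether the add happened.
 */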

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
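/*
 * The self-referencing define above tells the generic atomic code that
 * this architecture supplies its own atomic_inc_not_zero(), so no
 * fallback based on atomic_add_unless() is generated.
 */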

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
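/*
 * Note the "b" constraint on the temporary below: addi treats register
 * r0 as the literal value 0, so the compiler must not allocate r0 here.
 */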
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive
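/*
 * As with atomic_inc_not_zero() above, the define lets generic code see
 * that an architecture-specific atomic_dec_if_positive() exists.
 */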

#ifdef __powerpc64__

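/*
 * The 64-bit variants below mirror the 32-bit ones, using ldarx/stdcx.
 * on the 64-bit counter.  The PPC405_ERR77() workaround is omitted here:
 * erratum #77 only affects 32-bit 40x parts, which never build this
 * 64-bit code.
 */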
#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

#define ATOMIC64_OP(op, asm_op)						\
static __inline__ void atomic64_##op(long a, atomic64_t *v)		\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
"1:	ldarx	%0,0,%3		# atomic64_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
"	stdcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}

#define ATOMIC64_OP_RETURN(op, asm_op)					\
static __inline__ long atomic64_##op##_return(long a, atomic64_t *v)	\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	ldarx	%0,0,%2		# atomic64_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
"	stdcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC64_OPS(op, asm_op) ATOMIC64_OP(op, asm_op) ATOMIC64_OP_RETURN(op, asm_op)

ATOMIC64_OPS(add, add)
ATOMIC64_OPS(sub, subf)
ATOMIC64_OP(and, and)
ATOMIC64_OP(or, or)
ATOMIC64_OP(xor, xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
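/*
 * Unlike the 32-bit version, the decrement above uses the record form
 * addic., so the "blt" tests CR0 for the decremented value going
 * negative; the carry addic. sets is why XER is in the clobber list.
 */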

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was performed (@v was not @u), and
 * zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}
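/*
 * Note the different return convention: unlike __atomic_add_unless(),
 * which hands back the old value, atomic64_add_unless() returns whether
 * the addition was performed (t != u).
 */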

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */