xref: /openbmc/linux/include/asm-generic/atomic.h (revision 28aa2bda2211f4327d83b44a4f917b4a061b1c56)
13f7e212dSArnd Bergmann /*
2acac43e2SArun Sharma  * Generic C implementation of atomic counter operations. Usable on
3acac43e2SArun Sharma  * UP systems only. Do not include in machine independent code.
4acac43e2SArun Sharma  *
53f7e212dSArnd Bergmann  * Originally implemented for MN10300.
63f7e212dSArnd Bergmann  *
73f7e212dSArnd Bergmann  * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
83f7e212dSArnd Bergmann  * Written by David Howells (dhowells@redhat.com)
93f7e212dSArnd Bergmann  *
103f7e212dSArnd Bergmann  * This program is free software; you can redistribute it and/or
113f7e212dSArnd Bergmann  * modify it under the terms of the GNU General Public Licence
123f7e212dSArnd Bergmann  * as published by the Free Software Foundation; either version
133f7e212dSArnd Bergmann  * 2 of the Licence, or (at your option) any later version.
143f7e212dSArnd Bergmann  */
153f7e212dSArnd Bergmann #ifndef __ASM_GENERIC_ATOMIC_H
163f7e212dSArnd Bergmann #define __ASM_GENERIC_ATOMIC_H
173f7e212dSArnd Bergmann 
1834484277SDavid Howells #include <asm/cmpxchg.h>
19febdbfe8SPeter Zijlstra #include <asm/barrier.h>
2034484277SDavid Howells 
21560cb12aSPeter Zijlstra /*
22560cb12aSPeter Zijlstra  * atomic_$op() - $op integer to atomic variable
23560cb12aSPeter Zijlstra  * @i: integer value to $op
24560cb12aSPeter Zijlstra  * @v: pointer to the atomic variable
25560cb12aSPeter Zijlstra  *
26560cb12aSPeter Zijlstra  * Atomically $ops @i to @v. Does not strictly guarantee a memory-barrier, use
27560cb12aSPeter Zijlstra  * smp_mb__{before,after}_atomic().
28560cb12aSPeter Zijlstra  */
29560cb12aSPeter Zijlstra 
30560cb12aSPeter Zijlstra /*
31560cb12aSPeter Zijlstra  * atomic_$op_return() - $op integer to atomic variable and returns the result
32560cb12aSPeter Zijlstra  * @i: integer value to $op
33560cb12aSPeter Zijlstra  * @v: pointer to the atomic variable
34560cb12aSPeter Zijlstra  *
35560cb12aSPeter Zijlstra  * Atomically $ops @i to @v. Does imply a full memory barrier.
36560cb12aSPeter Zijlstra  */
37560cb12aSPeter Zijlstra 
383f7e212dSArnd Bergmann #ifdef CONFIG_SMP
39560cb12aSPeter Zijlstra 
40560cb12aSPeter Zijlstra /* we can build all atomic primitives from cmpxchg */
41560cb12aSPeter Zijlstra 
/*
 * SMP variant: build atomic_##op() out of cmpxchg().
 *
 * Snapshot the counter, then keep retrying cmpxchg() until no other
 * CPU modified the counter between our read and our update; on each
 * failure cmpxchg() hands back the current value, which becomes the
 * next attempt's expected value.
 * NOTE(review): the initial load of v->counter is a plain read; later
 * kernels use READ_ONCE() here to forbid compiler re-reads/tearing --
 * confirm whether any user of this generic header cares.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
}
51560cb12aSPeter Zijlstra 
/*
 * SMP variant: build atomic_##op##_return() out of cmpxchg().
 *
 * Same retry loop as ATOMIC_OP(); when the loop exits, @c holds the
 * value the counter had just before our successful update, so the new
 * (returned) value is "c c_op i".  The full barrier required by the
 * *_return() contract is implied by cmpxchg() itself.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c c_op i;						\
}
63560cb12aSPeter Zijlstra 
/*
 * SMP variant: build atomic_fetch_##op() out of cmpxchg().
 *
 * Identical loop to ATOMIC_OP_RETURN(), but implements fetch-and-op
 * semantics: the value returned is the one the counter held *before*
 * the operation was applied.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)			\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c;							\
}
75*28aa2bdaSPeter Zijlstra 
76560cb12aSPeter Zijlstra #else
77560cb12aSPeter Zijlstra 
78560cb12aSPeter Zijlstra #include <linux/irqflags.h>
79560cb12aSPeter Zijlstra 
/*
 * UP variant: with a single CPU the only source of concurrency is an
 * interrupt, so disabling interrupts around the read-modify-write is
 * enough to make it atomic.  raw_* is used to avoid lockdep/tracing
 * recursion from this very low-level primitive.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}
89560cb12aSPeter Zijlstra 
/*
 * UP variant: apply the op with interrupts disabled and return the
 * resulting (new) counter value, captured inside the irq-off window
 * so it cannot be stale.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}
102560cb12aSPeter Zijlstra 
/*
 * UP variant of fetch-and-op: snapshot the old counter value, apply
 * the op, all inside one irq-off window, and return the pre-op value.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = v->counter;						\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}
116*28aa2bdaSPeter Zijlstra 
117560cb12aSPeter Zijlstra #endif /* CONFIG_SMP */
118560cb12aSPeter Zijlstra 
/*
 * Instantiate the generic implementations.  Each instantiation is
 * guarded so an architecture can supply its own optimized version by
 * #defining the corresponding name before this point.
 */
#ifndef atomic_add_return
ATOMIC_OP_RETURN(add, +)
#endif

#ifndef atomic_sub_return
ATOMIC_OP_RETURN(sub, -)
#endif

#ifndef atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#endif

#ifndef atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#endif

#ifndef atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#endif

#ifndef atomic_fetch_or
/* Self-define so generic code can probe for atomic_fetch_or() support. */
#define atomic_fetch_or atomic_fetch_or

ATOMIC_FETCH_OP(or, |)
#endif

#ifndef atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)
#endif

#ifndef atomic_and
ATOMIC_OP(and, &)
#endif

#ifndef atomic_or
ATOMIC_OP(or, |)
#endif

#ifndef atomic_xor
ATOMIC_OP(xor, ^)
#endif

/* The generator macros are implementation detail only; drop them. */
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
164560cb12aSPeter Zijlstra 
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

/* Static initializer for an atomic_t, e.g.: atomic_t v = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  READ_ONCE() keeps the compiler
 * from tearing or duplicating the load.
 */
#ifndef atomic_read
#define atomic_read(v)	READ_ONCE((v)->counter)
#endif

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  WRITE_ONCE() keeps the
 * compiler from tearing the store.
 */
#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
1903f7e212dSArnd Bergmann 
191df9ee292SDavid Howells #include <linux/irqflags.h>
1923f7e212dSArnd Bergmann 
1933f7e212dSArnd Bergmann static inline int atomic_add_negative(int i, atomic_t *v)
1943f7e212dSArnd Bergmann {
1953f7e212dSArnd Bergmann 	return atomic_add_return(i, v) < 0;
1963f7e212dSArnd Bergmann }
1973f7e212dSArnd Bergmann 
1983f7e212dSArnd Bergmann static inline void atomic_add(int i, atomic_t *v)
1993f7e212dSArnd Bergmann {
2003f7e212dSArnd Bergmann 	atomic_add_return(i, v);
2013f7e212dSArnd Bergmann }
2023f7e212dSArnd Bergmann 
2033f7e212dSArnd Bergmann static inline void atomic_sub(int i, atomic_t *v)
2043f7e212dSArnd Bergmann {
2053f7e212dSArnd Bergmann 	atomic_sub_return(i, v);
2063f7e212dSArnd Bergmann }
2073f7e212dSArnd Bergmann 
2083f7e212dSArnd Bergmann static inline void atomic_inc(atomic_t *v)
2093f7e212dSArnd Bergmann {
2103f7e212dSArnd Bergmann 	atomic_add_return(1, v);
2113f7e212dSArnd Bergmann }
2123f7e212dSArnd Bergmann 
2133f7e212dSArnd Bergmann static inline void atomic_dec(atomic_t *v)
2143f7e212dSArnd Bergmann {
2153f7e212dSArnd Bergmann 	atomic_sub_return(1, v);
2163f7e212dSArnd Bergmann }
2173f7e212dSArnd Bergmann 
/* Derived helpers built on the add/sub primitives above. */
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

/* The *_and_test() family returns true when the new value is zero. */
#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

/* Exchange primitives; operate directly on the counter word. */
#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
2278b9d4069SMathieu Lacage 
/**
 * __atomic_add_unless - add to the counter unless it holds a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to @v...
 * @u: ...unless @v is equal to @u
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v; callers test "ret != u" for success.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	/* Retry until the cmpxchg() succeeds or the counter reaches @u. */
	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
		c = old;
	return c;
}
2363f7e212dSArnd Bergmann 
2373f7e212dSArnd Bergmann #endif /* __ASM_GENERIC_ATOMIC_H */
238