xref: /openbmc/linux/include/asm-generic/atomic.h (revision e6942b7de2dfe44ebde9bae57dadece5abca9de8)
/*
 * Generic C implementation of atomic counter operations. Usable on
 * UP systems only. Do not include in machine independent code.
 *
 * Originally implemented for MN10300.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

/*
 * atomic_$op() - $op integer to atomic variable
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does not strictly guarantee a memory barrier;
 * use smp_mb__{before,after}_atomic() where ordering is required.
 */

/*
 * atomic_$op_return() - $op integer to atomic variable and return the result
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v and returns the new value. Implies a full memory
 * barrier.
 */
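
/*
 * Illustrative usage sketch, not part of this header ('nr_events' and
 * 'flush_events' are hypothetical): the plain ops are for side-effect-only
 * updates, while the _return forms are used when the new value is needed
 * and full ordering is wanted:
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_add(1, &nr_events);
 *	if (atomic_add_return(1, &nr_events) > 64)
 *		flush_events();
 */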
37560cb12aSPeter Zijlstra 
#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c c_op i;						\
}
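
/*
 * For reference, ATOMIC_OP_RETURN(add, +) above expands to:
 *
 *	static inline int atomic_add_return(int i, atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = v->counter;
 *		while ((old = cmpxchg(&v->counter, c, c + i)) != c)
 *			c = old;
 *
 *		return c + i;
 *	}
 *
 * i.e. a classic cmpxchg() retry loop: reread the counter from the failed
 * exchange and retry until the update lands.
 */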

#else

#include <linux/irqflags.h>

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}
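
/*
 * On !SMP the only concurrency comes from interrupts on the local CPU, so
 * briefly disabling interrupts around the plain read-modify-write is enough
 * to make it atomic; no cmpxchg() is needed here.
 */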

#endif /* CONFIG_SMP */

#ifndef atomic_add_return
ATOMIC_OP_RETURN(add, +)
#endif

#ifndef atomic_sub_return
ATOMIC_OP_RETURN(sub, -)
#endif

#ifndef atomic_and
ATOMIC_OP(and, &)
#endif

#ifndef atomic_or
ATOMIC_OP(or, |)
#endif

#ifndef atomic_xor
ATOMIC_OP(xor, ^)
#endif

#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

static inline __deprecated void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	atomic_and(~mask, v);
}

static inline __deprecated void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	atomic_or(mask, v);
}
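
/*
 * Illustrative only ('IRQ_PENDING' and 'status' are hypothetical): new code
 * should call atomic_or()/atomic_and() directly instead of the deprecated
 * wrappers above:
 *
 *	atomic_or(IRQ_PENDING, &status);	replaces atomic_set_mask()
 *	atomic_and(~IRQ_PENDING, &status);	replaces atomic_clear_mask()
 */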

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#ifndef atomic_read
#define atomic_read(v)	ACCESS_ONCE((v)->counter)
#endif

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) (((v)->counter) = (i))

#include <linux/irqflags.h>

static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
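
/*
 * Usage sketch ('bias' and 'wake_up_writer' are hypothetical): folding the
 * add and the sign test into one fully ordered operation avoids a racy
 * separate read of the counter:
 *
 *	if (atomic_add_negative(-1, &bias))
 *		wake_up_writer();
 */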

static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
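
/*
 * Typical atomic_cmpxchg() retry pattern (a sketch only; 'clamp_add' is a
 * hypothetical helper, not part of this header): recompute the new value
 * from the latest snapshot until the exchange succeeds or the bound would
 * be exceeded.
 *
 *	static void clamp_add(atomic_t *v, int i, int max)
 *	{
 *		int old, c = atomic_read(v);
 *
 *		while (c + i <= max &&
 *		       (old = atomic_cmpxchg(v, c, c + i)) != c)
 *			c = old;
 *	}
 */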

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
		c = old;
	return c;
}
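
/*
 * Note that __atomic_add_unless() returns the old value, so callers can
 * tell whether the add happened (old != u) or was refused (old == u).
 * <linux/atomic.h> builds the more familiar helpers on top of it, roughly:
 *
 *	atomic_add_unless(v, a, u)  =>  __atomic_add_unless((v), (a), (u)) != (u)
 *	atomic_inc_not_zero(v)      =>  atomic_add_unless((v), 1, 0)
 */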

#endif /* __ASM_GENERIC_ATOMIC_H */