xref: /openbmc/linux/include/asm-generic/atomic.h (revision 3f7e212df82ca0459d44c91d9e019efd1b5f936c)
1*3f7e212dSArnd Bergmann /*
2*3f7e212dSArnd Bergmann  * Generic C implementation of atomic counter operations
3*3f7e212dSArnd Bergmann  * Originally implemented for MN10300.
4*3f7e212dSArnd Bergmann  *
5*3f7e212dSArnd Bergmann  * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
6*3f7e212dSArnd Bergmann  * Written by David Howells (dhowells@redhat.com)
7*3f7e212dSArnd Bergmann  *
8*3f7e212dSArnd Bergmann  * This program is free software; you can redistribute it and/or
9*3f7e212dSArnd Bergmann  * modify it under the terms of the GNU General Public Licence
10*3f7e212dSArnd Bergmann  * as published by the Free Software Foundation; either version
11*3f7e212dSArnd Bergmann  * 2 of the Licence, or (at your option) any later version.
12*3f7e212dSArnd Bergmann  */
13*3f7e212dSArnd Bergmann #ifndef __ASM_GENERIC_ATOMIC_H
14*3f7e212dSArnd Bergmann #define __ASM_GENERIC_ATOMIC_H
15*3f7e212dSArnd Bergmann 
16*3f7e212dSArnd Bergmann #ifdef CONFIG_SMP
17*3f7e212dSArnd Bergmann #error not SMP safe
18*3f7e212dSArnd Bergmann #endif
19*3f7e212dSArnd Bergmann 
20*3f7e212dSArnd Bergmann /*
21*3f7e212dSArnd Bergmann  * Atomic operations that C can't guarantee us.  Useful for
22*3f7e212dSArnd Bergmann  * resource counting etc..
23*3f7e212dSArnd Bergmann  */
24*3f7e212dSArnd Bergmann 
/* Static initializer for an atomic_t, e.g.: atomic_t x = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }
26*3f7e212dSArnd Bergmann 
27*3f7e212dSArnd Bergmann #ifdef __KERNEL__
28*3f7e212dSArnd Bergmann 
29*3f7e212dSArnd Bergmann /**
30*3f7e212dSArnd Bergmann  * atomic_read - read atomic variable
31*3f7e212dSArnd Bergmann  * @v: pointer of type atomic_t
32*3f7e212dSArnd Bergmann  *
33*3f7e212dSArnd Bergmann  * Atomically reads the value of @v.  Note that the guaranteed
34*3f7e212dSArnd Bergmann  * useful range of an atomic_t is only 24 bits.
35*3f7e212dSArnd Bergmann  */
36*3f7e212dSArnd Bergmann #define atomic_read(v)	((v)->counter)
37*3f7e212dSArnd Bergmann 
38*3f7e212dSArnd Bergmann /**
39*3f7e212dSArnd Bergmann  * atomic_set - set atomic variable
40*3f7e212dSArnd Bergmann  * @v: pointer of type atomic_t
41*3f7e212dSArnd Bergmann  * @i: required value
42*3f7e212dSArnd Bergmann  *
43*3f7e212dSArnd Bergmann  * Atomically sets the value of @v to @i.  Note that the guaranteed
44*3f7e212dSArnd Bergmann  * useful range of an atomic_t is only 24 bits.
45*3f7e212dSArnd Bergmann  */
46*3f7e212dSArnd Bergmann #define atomic_set(v, i) (((v)->counter) = (i))
47*3f7e212dSArnd Bergmann 
48*3f7e212dSArnd Bergmann #include <asm/system.h>
49*3f7e212dSArnd Bergmann 
50*3f7e212dSArnd Bergmann /**
51*3f7e212dSArnd Bergmann  * atomic_add_return - add integer to atomic variable
52*3f7e212dSArnd Bergmann  * @i: integer value to add
53*3f7e212dSArnd Bergmann  * @v: pointer of type atomic_t
54*3f7e212dSArnd Bergmann  *
55*3f7e212dSArnd Bergmann  * Atomically adds @i to @v and returns the result
56*3f7e212dSArnd Bergmann  * Note that the guaranteed useful range of an atomic_t is only 24 bits.
57*3f7e212dSArnd Bergmann  */
58*3f7e212dSArnd Bergmann static inline int atomic_add_return(int i, atomic_t *v)
59*3f7e212dSArnd Bergmann {
60*3f7e212dSArnd Bergmann 	unsigned long flags;
61*3f7e212dSArnd Bergmann 	int temp;
62*3f7e212dSArnd Bergmann 
63*3f7e212dSArnd Bergmann 	local_irq_save(flags);
64*3f7e212dSArnd Bergmann 	temp = v->counter;
65*3f7e212dSArnd Bergmann 	temp += i;
66*3f7e212dSArnd Bergmann 	v->counter = temp;
67*3f7e212dSArnd Bergmann 	local_irq_restore(flags);
68*3f7e212dSArnd Bergmann 
69*3f7e212dSArnd Bergmann 	return temp;
70*3f7e212dSArnd Bergmann }
71*3f7e212dSArnd Bergmann 
72*3f7e212dSArnd Bergmann /**
73*3f7e212dSArnd Bergmann  * atomic_sub_return - subtract integer from atomic variable
74*3f7e212dSArnd Bergmann  * @i: integer value to subtract
75*3f7e212dSArnd Bergmann  * @v: pointer of type atomic_t
76*3f7e212dSArnd Bergmann  *
77*3f7e212dSArnd Bergmann  * Atomically subtracts @i from @v and returns the result
78*3f7e212dSArnd Bergmann  * Note that the guaranteed useful range of an atomic_t is only 24 bits.
79*3f7e212dSArnd Bergmann  */
80*3f7e212dSArnd Bergmann static inline int atomic_sub_return(int i, atomic_t *v)
81*3f7e212dSArnd Bergmann {
82*3f7e212dSArnd Bergmann 	unsigned long flags;
83*3f7e212dSArnd Bergmann 	int temp;
84*3f7e212dSArnd Bergmann 
85*3f7e212dSArnd Bergmann 	local_irq_save(flags);
86*3f7e212dSArnd Bergmann 	temp = v->counter;
87*3f7e212dSArnd Bergmann 	temp -= i;
88*3f7e212dSArnd Bergmann 	v->counter = temp;
89*3f7e212dSArnd Bergmann 	local_irq_restore(flags);
90*3f7e212dSArnd Bergmann 
91*3f7e212dSArnd Bergmann 	return temp;
92*3f7e212dSArnd Bergmann }
93*3f7e212dSArnd Bergmann 
94*3f7e212dSArnd Bergmann static inline int atomic_add_negative(int i, atomic_t *v)
95*3f7e212dSArnd Bergmann {
96*3f7e212dSArnd Bergmann 	return atomic_add_return(i, v) < 0;
97*3f7e212dSArnd Bergmann }
98*3f7e212dSArnd Bergmann 
99*3f7e212dSArnd Bergmann static inline void atomic_add(int i, atomic_t *v)
100*3f7e212dSArnd Bergmann {
101*3f7e212dSArnd Bergmann 	atomic_add_return(i, v);
102*3f7e212dSArnd Bergmann }
103*3f7e212dSArnd Bergmann 
104*3f7e212dSArnd Bergmann static inline void atomic_sub(int i, atomic_t *v)
105*3f7e212dSArnd Bergmann {
106*3f7e212dSArnd Bergmann 	atomic_sub_return(i, v);
107*3f7e212dSArnd Bergmann }
108*3f7e212dSArnd Bergmann 
109*3f7e212dSArnd Bergmann static inline void atomic_inc(atomic_t *v)
110*3f7e212dSArnd Bergmann {
111*3f7e212dSArnd Bergmann 	atomic_add_return(1, v);
112*3f7e212dSArnd Bergmann }
113*3f7e212dSArnd Bergmann 
114*3f7e212dSArnd Bergmann static inline void atomic_dec(atomic_t *v)
115*3f7e212dSArnd Bergmann {
116*3f7e212dSArnd Bergmann 	atomic_sub_return(1, v);
117*3f7e212dSArnd Bergmann }
118*3f7e212dSArnd Bergmann 
/* Increment/decrement and return the new value. */
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

/* Arithmetic combined with a test of whether the new value is zero. */
#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)

/*
 * atomic_add_unless - add @a to @v unless @v currently holds @u.
 * Returns non-zero if the addition was performed (i.e. the observed
 * value was not @u).  Implemented as a cmpxchg loop: retry with the
 * freshly observed value whenever the counter changed between the
 * read and the cmpxchg.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})

/* Increment @v unless it is zero; non-zero return means it was done. */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
136*3f7e212dSArnd Bergmann 
/**
 * atomic_clear_mask - clear bits in a word
 * @mask: bits to clear
 * @addr: pointer to the word to modify
 *
 * Atomically (with respect to interrupts on this CPU) clears the bits
 * given in @mask from the value at @addr.
 */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;
	unsigned long keep = ~mask;	/* bits that survive the clear */

	local_irq_save(flags);
	*addr &= keep;
	local_irq_restore(flags);
}
146*3f7e212dSArnd Bergmann 
/* Exchange / compare-and-exchange on the counter, delegating to the
 * architecture-provided xchg()/cmpxchg() from <asm/system.h>. */
#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))

/* CPU-local cmpxchg variants, backed by the generic helpers. */
#define cmpxchg_local(ptr, o, n)				  	       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))

#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

/* Assume that atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
161*3f7e212dSArnd Bergmann 
162*3f7e212dSArnd Bergmann #include <asm-generic/atomic-long.h>
163*3f7e212dSArnd Bergmann 
164*3f7e212dSArnd Bergmann #endif /* __KERNEL__ */
165*3f7e212dSArnd Bergmann #endif /* __ASM_GENERIC_ATOMIC_H */
166