#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

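/*
 * Plain reads and writes go through READ_ONCE()/WRITE_ONCE() so the
 * compiler performs a single volatile access and cannot tear, fuse,
 * or re-read the counter.  These accessors imply no memory ordering.
 */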
#define atomic_read(v)		READ_ONCE((v)->counter)
#define atomic64_read(v)	READ_ONCE((v)->counter)

#define atomic_set(v,i)		WRITE_ONCE(((v)->counter), (i))
#define atomic64_set(v,i)	WRITE_ONCE(((v)->counter), (i))

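/*
 * Generic read-modify-write via a compare-and-exchange loop: sample
 * the counter, compute the new value, and try to install it with an
 * acquire-semantics cmpxchg.  If another CPU changed the counter in
 * the meantime, ia64_cmpxchg() returns something other than "old" and
 * the loop retries.  The post-operation value is returned.
 */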
#define ATOMIC_OP(op, c_op)						\
static __inline__ int							\
ia64_atomic_##op (int i, atomic_t *v)					\
{									\
	__s32 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old); \
	return new;							\
}

ATOMIC_OP(add, +)
ATOMIC_OP(sub, -)

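/*
 * ATOMIC_OP(add, +) and ATOMIC_OP(sub, -) above generate
 * ia64_atomic_add() and ia64_atomic_sub(), which return the value of
 * the counter after the operation.  They are only the slow path: the
 * ia64 fetchadd instruction accepts just the immediate increments
 * -16, -8, -4, -1, 1, 4, 8 and 16, so atomic_add_return() takes the
 * single-instruction ia64_fetch_and_add() path when the addend is a
 * compile-time constant from that set, and falls back to the cmpxchg
 * loop otherwise.
 */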
#define atomic_add_return(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic_add(__ia64_aar_i, v);			\
})

#define atomic_sub_return(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==   1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==   8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i ==  -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i ==  -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic_sub(__ia64_asr_i, v);			\
})
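
/*
 * A minimal usage sketch (hypothetical caller): passing a constant of
 * 1 lets atomic_sub_return() take the fetchadd fast path
 * (release_the_object() is a hypothetical helper):
 *
 *	static atomic_t refcount = ATOMIC_INIT(1);
 *
 *	if (atomic_sub_return(1, &refcount) == 0)
 *		release_the_object();
 */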

ATOMIC_OP(and, &)
ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)

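/* The bitwise wrappers discard the value computed by the cmpxchg loop. */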
#define atomic_and(i,v)	(void)ia64_atomic_and(i,v)
#define atomic_or(i,v)	(void)ia64_atomic_or(i,v)
#define atomic_xor(i,v)	(void)ia64_atomic_xor(i,v)

#undef ATOMIC_OP

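/*
 * The 64-bit counterparts below mirror the 32-bit operations above;
 * sizeof(atomic64_t) steers ia64_cmpxchg() to an 8-byte compare and
 * exchange, and the same fetchadd immediate restriction applies.
 */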
#define ATOMIC64_OP(op, c_op)						\
static __inline__ long							\
ia64_atomic64_##op (__s64 i, atomic64_t *v)				\
{									\
	__s64 old, new;							\
	CMPXCHG_BUGCHECK_DECL						\
									\
	do {								\
		CMPXCHG_BUGCHECK(v);					\
		old = atomic64_read(v);					\
		new = old c_op i;					\
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old); \
	return new;							\
}

ATOMIC64_OP(add, +)
ATOMIC64_OP(sub, -)

#define atomic64_add_return(i,v)					\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})

#define atomic64_sub_return(i,v)					\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==   1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==   8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i ==  -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i ==  -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

ATOMIC64_OP(and, &)
ATOMIC64_OP(or, |)
ATOMIC64_OP(xor, ^)

#define atomic64_and(i,v)	(void)ia64_atomic64_and(i,v)
#define atomic64_or(i,v)	(void)ia64_atomic64_or(i,v)
#define atomic64_xor(i,v)	(void)ia64_atomic64_xor(i,v)

#undef ATOMIC64_OP

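/*
 * atomic_cmpxchg(): if @v holds @old, atomically replace it with @new;
 * either way, return the value @v held beforehand.  atomic_xchg()
 * unconditionally installs @new and returns the previous value.
 */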
#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
	(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

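/*
 * __atomic_add_unless - add @a to @v, unless @v already holds @u.
 * Returns the old value of @v, so the caller can compare it against
 * @u to tell whether the add happened.
 */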
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

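/*
 * atomic64_add_unless - same idea as __atomic_add_unless above, but
 * with a different return convention: non-zero if the add was
 * performed, zero if @v already held @u.
 */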
static __inline__ long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

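/* Increment @v unless it is zero; returns non-zero if it incremented. */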
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static __inline__ long
atomic64_add_negative (__s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

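/*
 * The remaining operations are thin wrappers: the inc/dec and
 * *_and_test forms are built on the add/sub_return primitives above,
 * and the void add/sub/inc/dec variants simply discard the result.
 */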
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)

#define atomic_add(i,v)			(void)atomic_add_return((i), (v))
#define atomic_sub(i,v)			(void)atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#define atomic64_add(i,v)		(void)atomic64_add_return((i), (v))
#define atomic64_sub(i,v)		(void)atomic64_sub_return((i), (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))

#endif /* _ASM_IA64_ATOMIC_H */