#ifndef _ASM_GENERIC_ATOMIC_LONG_H
#define _ASM_GENERIC_ATOMIC_LONG_H
/*
 * Copyright (C) 2005 Silicon Graphics, Inc.
 * Christoph Lameter
 *
 * Allows arch-independent atomic definitions to be provided without the
 * need to edit all arch-specific atomic.h files.
 */

#include <asm/types.h>

/*
 * Support for atomic_long_t
 *
 * Casts for parameters are avoided for existing atomic functions in order to
 * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
 * macros of a platform may have.
 */

#if BITS_PER_LONG == 64

/* On 64-bit architectures, atomic_long_t is backed by atomic64_t. */
typedef atomic64_t atomic_long_t;

#define ATOMIC_LONG_INIT(i)	ATOMIC64_INIT(i)

static inline long atomic_long_read(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_read(v);
}

static inline void atomic_long_set(atomic_long_t *l, long i)
{
	atomic64_t *v = (atomic64_t *)l;

	atomic64_set(v, i);
}

static inline void atomic_long_inc(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	atomic64_inc(v);
}

static inline void atomic_long_dec(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	atomic64_dec(v);
}

static inline void atomic_long_add(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	atomic64_add(i, v);
}

static inline void atomic_long_sub(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	atomic64_sub(i, v);
}

static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return atomic64_sub_and_test(i, v);
}

static inline int atomic_long_dec_and_test(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return atomic64_dec_and_test(v);
}

static inline int atomic_long_inc_and_test(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return atomic64_inc_and_test(v);
}

static inline int atomic_long_add_negative(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return atomic64_add_negative(i, v);
}

static inline long atomic_long_add_return(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_add_return(i, v);
}

static inline long atomic_long_sub_return(long i, atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_sub_return(i, v);
}

static inline long atomic_long_inc_return(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_inc_return(v);
}

static inline long atomic_long_dec_return(atomic_long_t *l)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_dec_return(v);
}

static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
{
	atomic64_t *v = (atomic64_t *)l;

	return (long)atomic64_add_unless(v, a, u);
}

#define atomic_long_inc_not_zero(l)	atomic64_inc_not_zero((atomic64_t *)(l))

#define atomic_long_cmpxchg(l, old, new) \
	(atomic64_cmpxchg((atomic64_t *)(l), (old), (new)))
#define atomic_long_xchg(v, new) \
	(atomic64_xchg((atomic64_t *)(v), (new)))

#else /* BITS_PER_LONG == 64 */

/* On 32-bit architectures, atomic_long_t is backed by atomic_t. */
typedef atomic_t atomic_long_t;

#define ATOMIC_LONG_INIT(i)	ATOMIC_INIT(i)

static inline long atomic_long_read(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_read(v);
}

static inline void
atomic_long_set(atomic_long_t *l, long i)
{
	atomic_t *v = (atomic_t *)l;

	atomic_set(v, i);
}

static inline void atomic_long_inc(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	atomic_inc(v);
}

static inline void atomic_long_dec(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	atomic_dec(v);
}

static inline void atomic_long_add(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	atomic_add(i, v);
}

static inline void atomic_long_sub(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	atomic_sub(i, v);
}

/* The remaining operations are not available in U-Boot builds. */
#ifndef __UBOOT__
static inline int atomic_long_sub_and_test(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return atomic_sub_and_test(i, v);
}

static inline int atomic_long_dec_and_test(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return atomic_dec_and_test(v);
}

static inline int atomic_long_inc_and_test(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return atomic_inc_and_test(v);
}

static inline int atomic_long_add_negative(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return atomic_add_negative(i, v);
}

static inline long atomic_long_add_return(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_add_return(i, v);
}

static inline long atomic_long_sub_return(long i, atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_sub_return(i, v);
}

static inline long atomic_long_inc_return(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_inc_return(v);
}

static inline long atomic_long_dec_return(atomic_long_t *l)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_dec_return(v);
}

static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
{
	atomic_t *v = (atomic_t *)l;

	return (long)atomic_add_unless(v, a, u);
}

#define atomic_long_inc_not_zero(l)	atomic_inc_not_zero((atomic_t *)(l))

#define atomic_long_cmpxchg(l, old, new) \
	(atomic_cmpxchg((atomic_t *)(l), (old), (new)))
#define atomic_long_xchg(v, new) \
	(atomic_xchg((atomic_t *)(v), (new)))
#endif /* __UBOOT__ */

#endif /* BITS_PER_LONG == 64 */

#endif /* _ASM_GENERIC_ATOMIC_LONG_H */
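
/*
 * Illustrative usage sketch, guarded out with #if 0 so it is never compiled.
 * It shows a minimal reference counter built on the atomic_long_t operations
 * above; the names my_obj, my_obj_init, my_obj_get and my_obj_put are
 * hypothetical and not part of this header.
 */
#if 0
struct my_obj {
	atomic_long_t refcnt;
};

static void my_obj_init(struct my_obj *obj)
{
	/* Start with a single reference. */
	atomic_long_set(&obj->refcnt, 1);
}

static void my_obj_get(struct my_obj *obj)
{
	/* Take an additional reference. */
	atomic_long_inc(&obj->refcnt);
}

static int my_obj_put(struct my_obj *obj)
{
	/*
	 * Drop a reference; atomic_long_dec_and_test() returns nonzero
	 * once the count hits zero, telling the caller to free the object.
	 * (Note: on 32-bit, this helper is compiled out under __UBOOT__.)
	 */
	return atomic_long_dec_and_test(&obj->refcnt);
}
#endif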