1 #ifndef _ASM_X86_ATOMIC64_32_H 2 #define _ASM_X86_ATOMIC64_32_H 3 4 #include <linux/compiler.h> 5 #include <linux/types.h> 6 #include <asm/processor.h> 7 //#include <asm/cmpxchg.h> 8 9 /* An 64bit atomic type */ 10 11 typedef struct { 12 u64 __aligned(8) counter; 13 } atomic64_t; 14 15 #define ATOMIC64_INIT(val) { (val) } 16 17 #ifdef CONFIG_X86_CMPXCHG64 18 #define ATOMIC64_ALTERNATIVE_(f, g) "call atomic64_" #g "_cx8" 19 #else 20 #define ATOMIC64_ALTERNATIVE_(f, g) ALTERNATIVE("call atomic64_" #f "_386", "call atomic64_" #g "_cx8", X86_FEATURE_CX8) 21 #endif 22 23 #define ATOMIC64_ALTERNATIVE(f) ATOMIC64_ALTERNATIVE_(f, f) 24 25 /** 26 * atomic64_cmpxchg - cmpxchg atomic64 variable 27 * @p: pointer to type atomic64_t 28 * @o: expected value 29 * @n: new value 30 * 31 * Atomically sets @v to @n if it was equal to @o and returns 32 * the old value. 33 */ 34 35 static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n) 36 { 37 return cmpxchg64(&v->counter, o, n); 38 } 39 40 /** 41 * atomic64_xchg - xchg atomic64 variable 42 * @v: pointer to type atomic64_t 43 * @n: value to assign 44 * 45 * Atomically xchgs the value of @v to @n and returns 46 * the old value. 47 */ 48 static inline long long atomic64_xchg(atomic64_t *v, long long n) 49 { 50 long long o; 51 unsigned high = (unsigned)(n >> 32); 52 unsigned low = (unsigned)n; 53 asm volatile(ATOMIC64_ALTERNATIVE(xchg) 54 : "=A" (o), "+b" (low), "+c" (high) 55 : "S" (v) 56 : "memory" 57 ); 58 return o; 59 } 60 61 /** 62 * atomic64_set - set atomic64 variable 63 * @v: pointer to type atomic64_t 64 * @n: value to assign 65 * 66 * Atomically sets the value of @v to @n. 
 */
static inline void atomic64_set(atomic64_t *v, long long i)
{
	/* Split @i into the %ebx:%ecx pair expected by the helper. */
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	/*
	 * @v goes in %esi; the clobber list tells the compiler the
	 * out-of-line helper may modify %eax/%edx.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(set)
		     : "+b" (low), "+c" (high)
		     : "S" (v)
		     : "eax", "edx", "memory"
		     );
}

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline long long atomic64_read(atomic64_t *v)
{
	long long r;
	/*
	 * Note: unlike most helpers in this file (which take @v in
	 * %esi), the read helper takes @v in %ecx ("+c") and returns
	 * the 64-bit value in %edx:%eax ("=A").
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(read)
		     : "=A" (r), "+c" (v)
		     : : "memory"
		     );
	return r;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	/*
	 * "+A" (i): the addend goes in, and whatever the helper leaves
	 * in %edx:%eax (the new value) comes back out as the return.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE(add_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}

/*
 * Other variants with different arithmetic operators:
 */
static inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	/* Same in/out convention as atomic64_add_return(). */
	asm volatile(ATOMIC64_ALTERNATIVE(sub_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}

static inline long long atomic64_inc_return(atomic64_t *v)
{
	long long a;
	/* Result in %edx:%eax; %ecx is declared clobbered by the helper. */
	asm volatile(ATOMIC64_ALTERNATIVE(inc_return)
		     : "=A" (a)
		     : "S" (v)
		     : "memory", "ecx"
		     );
	return a;
}

static inline long long atomic64_dec_return(atomic64_t *v)
{
	long long a;
	asm volatile(ATOMIC64_ALTERNATIVE(dec_return)
		     : "=A" (a)
		     : "S" (v)
		     : "memory", "ecx"
		     );
	return a;
}

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline long long atomic64_add(long long i, atomic64_t *v)
{
	/*
	 * Calls the plain "add" helper on pre-CX8 CPUs, the
	 * "add_return" cx8 helper otherwise; "+A" passes @i in
	 * %edx:%eax and returns whatever the helper leaves there.
	 * NOTE(review): the returned value is therefore only
	 * meaningful on the _return (cx8) path — confirm before
	 * relying on it.
	 */
	asm volatile(ATOMIC64_ALTERNATIVE_(add, add_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}

/**
 * atomic64_sub - subtract the atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline long long atomic64_sub(long long i, atomic64_t *v)
{
	/* Same helper-selection and register convention as atomic64_add(). */
	asm volatile(ATOMIC64_ALTERNATIVE_(sub, sub_return)
		     : "+A" (i), "+c" (v)
		     : : "memory"
		     );
	return i;
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	/* No output; all caller-visible registers the helper may touch
	 * are listed as clobbers. */
	asm volatile(ATOMIC64_ALTERNATIVE_(inc, inc_return)
		     : : "S" (v)
		     : "memory", "eax", "ecx", "edx"
		     );
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(ATOMIC64_ALTERNATIVE_(dec, dec_return)
		     : : "S" (v)
		     : "memory", "eax", "ecx", "edx"
		     );
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
226 */ 227 static inline int atomic64_dec_and_test(atomic64_t *v) 228 { 229 return atomic64_dec_return(v) == 0; 230 } 231 232 /** 233 * atomic64_inc_and_test - increment and test 234 * @v: pointer to type atomic64_t 235 * 236 * Atomically increments @v by 1 237 * and returns true if the result is zero, or false for all 238 * other cases. 239 */ 240 static inline int atomic64_inc_and_test(atomic64_t *v) 241 { 242 return atomic64_inc_return(v) == 0; 243 } 244 245 /** 246 * atomic64_add_negative - add and test if negative 247 * @i: integer value to add 248 * @v: pointer to type atomic64_t 249 * 250 * Atomically adds @i to @v and returns true 251 * if the result is negative, or false when 252 * result is greater than or equal to zero. 253 */ 254 static inline int atomic64_add_negative(long long i, atomic64_t *v) 255 { 256 return atomic64_add_return(i, v) < 0; 257 } 258 259 /** 260 * atomic64_add_unless - add unless the number is a given value 261 * @v: pointer of type atomic64_t 262 * @a: the amount to add to v... 263 * @u: ...unless v is equal to u. 264 * 265 * Atomically adds @a to @v, so long as it was not @u. 266 * Returns the old value of @v. 
 */
static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	/* @u is handed to the helper split across %esi (low) / %edi (high);
	 * @a travels in %edx:%eax ("+A") and @v in %ecx. */
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	asm volatile(ATOMIC64_ALTERNATIVE(add_unless) "\n\t"
		     : "+A" (a), "+c" (v), "+S" (low), "+D" (high)
		     : : "memory");
	/*
	 * The helper's result is left in %edx:%eax and truncated to int
	 * here. NOTE(review): the exact semantics live in the out-of-line
	 * helper, not visible in this file.
	 */
	return (int)a;
}


static inline int atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	/* int result comes back in %eax ("=a"); %ecx/%edx are clobbered. */
	asm volatile(ATOMIC64_ALTERNATIVE(inc_not_zero)
		     : "=a" (r)
		     : "S" (v)
		     : "ecx", "edx", "memory"
		     );
	return r;
}

static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long r;
	/* 64-bit result in %edx:%eax ("=A"); semantics defined by the
	 * out-of-line helper. */
	asm volatile(ATOMIC64_ALTERNATIVE(dec_if_positive)
		     : "=A" (r)
		     : "S" (v)
		     : "ecx", "memory"
		     );
	return r;
}

/* The helper-selection macros are private to this header. */
#undef ATOMIC64_ALTERNATIVE
#undef ATOMIC64_ALTERNATIVE_

#endif /* _ASM_X86_ATOMIC64_32_H */