#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline long atomic64_read(const atomic64_t *v)
{
	return READ_ONCE((v)->counter);
}

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void atomic64_set(atomic64_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_sub_and_test(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, "er", i, "%0", e);
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, "%0", e);
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, "%0", e);
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool atomic64_add_negative(long i, atomic64_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, "er", i, "%0", s);
}
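/*
 * Note: GEN_UNARY_RMWcc()/GEN_BINARY_RMWcc() come from <asm/rmwcc.h>.
 * They emit a single locked instruction and return the condition flag
 * it produced ("e" = zero, "s" = sign), so no separate cmp/test is
 * needed after the read-modify-write.  As a rough sketch only (not
 * the actual macro expansion), on a compiler with flag-output
 * constraints atomic64_dec_and_test() behaves roughly like:
 *
 *	bool zero;
 *	asm volatile(LOCK_PREFIX "decq %0"
 *		     : "+m" (v->counter), "=@cce" (zero));
 *	return zero;
 *
 * Older toolchains get an equivalent set<cc> or asm-goto expansion.
 */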
/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline long atomic64_add_return(long i, atomic64_t *v)
{
	return i + xadd(&v->counter, i);
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

static inline long atomic64_fetch_add(long i, atomic64_t *v)
{
	return xadd(&v->counter, i);
}

static inline long atomic64_fetch_sub(long i, atomic64_t *v)
{
	return xadd(&v->counter, -i);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)  (atomic64_sub_return(1, (v)))

static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
{
	return cmpxchg(&v->counter, old, new);
}

#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, long new)
{
	return try_cmpxchg(&v->counter, old, new);
}

static inline long atomic64_xchg(atomic64_t *v, long new)
{
	return xchg(&v->counter, new);
}

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true if the addition was done.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
{
	s64 c = atomic64_read(v);
	do {
		if (unlikely(c == u))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));
	return true;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static inline long atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = atomic64_read(v);
	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));
	return dec;
}
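/*
 * Usage sketch (hypothetical caller, not part of this header): the
 * atomic64_try_cmpxchg() loops above retry with the value written
 * back into their 'old' argument on failure, so the counter is only
 * explicitly re-read once, before the first attempt.  A typical
 * consumer of atomic64_inc_not_zero() is refcount-style lookup code,
 * e.g. for a hypothetical object whose 'refs' field is an atomic64_t:
 *
 *	struct obj { atomic64_t refs; };
 *
 *	static bool obj_tryget(struct obj *o)
 *	{
 *		return atomic64_inc_not_zero(&o->refs);
 *	}
 *
 * which takes a reference only while the object is still live.
 */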
static inline void atomic64_and(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "andq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline long atomic64_fetch_and(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	do {
		/* try_cmpxchg() reloads 'val' with the current value on failure */
	} while (!atomic64_try_cmpxchg(v, &val, val & i));
	return val;
}

static inline void atomic64_or(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "orq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline long atomic64_fetch_or(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	do {
	} while (!atomic64_try_cmpxchg(v, &val, val | i));
	return val;
}

static inline void atomic64_xor(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "xorq %1,%0"
		     : "+m" (v->counter)
		     : "er" (i)
		     : "memory");
}

static inline long atomic64_fetch_xor(long i, atomic64_t *v)
{
	s64 val = atomic64_read(v);

	do {
	} while (!atomic64_try_cmpxchg(v, &val, val ^ i));
	return val;
}

#endif /* _ASM_X86_ATOMIC64_64_H */