#ifndef _ASM_X86_ATOMIC_H
#define _ASM_X86_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/rmwcc.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
	return READ_ONCE((v)->counter);
}

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
	WRITE_ONCE(v->counter, i);
}

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __always_inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "subl", v->counter, "er", i, "%0", e);
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "decl", v->counter, "%0", e);
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	GEN_UNARY_RMWcc(LOCK_PREFIX "incl", v->counter, "%0", e);
}
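/*
 * Illustrative sketch (not part of this header) of the usual refcounting
 * idiom built on atomic_dec_and_test() above: drop a reference and free
 * the object only when the last reference goes away.  The struct foo and
 * the foo_put() helper are hypothetical; kfree() is the ordinary kernel
 * allocator free routine.
 *
 *	struct foo {
 *		atomic_t refcount;
 *	};
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcount))
 *			kfree(f);
 *	}
 */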
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	GEN_BINARY_RMWcc(LOCK_PREFIX "addl", v->counter, "er", i, "%0", s);
}

/**
 * atomic_add_return - add integer and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	return i + xadd(&v->counter, i);
}

/**
 * atomic_sub_return - subtract integer and return
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))

/* Atomically add @i to @v and return the old value of @v. */
static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	return xadd(&v->counter, i);
}

/* Atomically subtract @i from @v and return the old value of @v. */
static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	return xadd(&v->counter, -i);
}

/* Atomically set @v to @new if it equals @old; return the old value of @v. */
static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return cmpxchg(&v->counter, old, new);
}

/* Atomically exchange @v with @new and return the old value of @v. */
static inline int atomic_xchg(atomic_t *v, int new)
{
	return xchg(&v->counter, new);
}

#define ATOMIC_OP(op)							\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	asm volatile(LOCK_PREFIX #op"l %1,%0"				\
			: "+m" (v->counter)				\
			: "ir" (i)					\
			: "memory");					\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int old, val = atomic_read(v);					\
	for (;;) {							\
		old = atomic_cmpxchg(v, val, val c_op i);		\
		if (old == val)						\
			break;						\
		val = old;						\
	}								\
	return old;							\
}

#define ATOMIC_OPS(op, c_op)						\
	ATOMIC_OP(op)							\
	ATOMIC_FETCH_OP(op, c_op)

ATOMIC_OPS(and, &)
ATOMIC_OPS(or , |)
ATOMIC_OPS(xor, ^)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP

/**
 * __atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the old value of @v.
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer of type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static __always_inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}

#ifdef CONFIG_X86_32
# include <asm/atomic64_32.h>
#else
# include <asm/atomic64_64.h>
#endif

#endif /* _ASM_X86_ATOMIC_H */
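/*
 * Illustrative sketch (not part of this header) of the cmpxchg() retry
 * loop that ATOMIC_FETCH_OP() and __atomic_add_unless() above are built
 * on: read the current value, compute the desired new value, and retry
 * the compare-and-exchange until no concurrent update intervenes.  The
 * example_fetch_or() helper is hypothetical.
 *
 *	static int example_fetch_or(int mask, atomic_t *v)
 *	{
 *		int old = atomic_read(v), prev;
 *
 *		for (;;) {
 *			prev = atomic_cmpxchg(v, old, old | mask);
 *			if (prev == old)
 *				return prev;
 *			old = prev;
 *		}
 *	}
 */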