/*
 * Generic implementation of 64-bit atomics using spinlocks,
 * useful on processors that don't have 64-bit atomic instructions.
 *
 * Copyright © 2009 Paul Mackerras, IBM Corp. <paulus@au1.ibm.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */
#ifndef _ASM_GENERIC_ATOMIC64_H
#define _ASM_GENERIC_ATOMIC64_H

typedef struct {
	long long counter;
} atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

extern long long atomic64_read(const atomic64_t *v);
extern void	 atomic64_set(atomic64_t *v, long long i);

/*
 * Declare the out-of-line arithmetic and bitwise operations; the
 * spinlock-based implementations live in lib/atomic64.c.
 */
#define ATOMIC64_OP(op) \
extern void	 atomic64_##op(long long a, atomic64_t *v);

#define ATOMIC64_OP_RETURN(op) \
extern long long atomic64_##op##_return(long long a, atomic64_t *v);

#define ATOMIC64_OPS(op)	ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)

ATOMIC64_OP(and)
ATOMIC64_OP(or)
ATOMIC64_OP(xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

extern long long atomic64_dec_if_positive(atomic64_t *v);
extern long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n);
extern long long atomic64_xchg(atomic64_t *v, long long new);
extern int	 atomic64_add_unless(atomic64_t *v, long long a, long long u);

/* Helpers built on the primitives declared above. */
#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)

#endif /* _ASM_GENERIC_ATOMIC64_H */
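/*
 * Usage sketch, illustrative only: a minimal, hypothetical caller of the
 * interface declared above. The identifiers example_bytes_freed and
 * example_account_free() are invented for this sketch and are not part
 * of this header.
 *
 *	static atomic64_t example_bytes_freed = ATOMIC64_INIT(0);
 *
 *	static void example_account_free(long long nbytes)
 *	{
 *		long long total;
 *
 *		total = atomic64_add_return(nbytes, &example_bytes_freed);
 *
 *		if (total >= (1LL << 20))
 *			atomic64_cmpxchg(&example_bytes_freed, total, 0);
 *	}
 *
 * atomic64_add_return() yields the updated value, and atomic64_cmpxchg()
 * resets the counter only if it still holds that value, so the
 * check-and-reset cannot clobber a concurrent update.
 */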