/* atomic.h: Thankfully the V9 is at least reasonable for this
 *           stuff.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/types.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }

/* The volatile cast forces a fresh load from memory on every read. */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

#define atomic_set(v, i)	(((v)->counter) = i)
#define atomic64_set(v, i)	(((v)->counter) = i)
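
/*
 * A minimal usage sketch (hypothetical caller, not part of this header):
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 5);
 *	pr_info("users: %d\n", atomic_read(&nr_users));
 */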

/* Out-of-line implementations live in arch/sparc/lib/atomic_64.S. */
extern void atomic_add(int, atomic_t *);
extern void atomic64_add(long, atomic64_t *);
extern void atomic_sub(int, atomic_t *);
extern void atomic64_sub(long, atomic64_t *);

extern int atomic_add_ret(int, atomic_t *);
extern long atomic64_add_ret(long, atomic64_t *);
extern int atomic_sub_ret(int, atomic_t *);
extern long atomic64_sub_ret(long, atomic64_t *);
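
/*
 * The _ret variants return the new value of the counter: e.g. after
 * atomic_add_ret(2, &a) on a counter holding 1, both the counter and
 * the return value are 3.  The *_return macros below map onto them.
 */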

#define atomic_dec_return(v) atomic_sub_ret(1, v)
#define atomic64_dec_return(v) atomic64_sub_ret(1, v)

#define atomic_inc_return(v) atomic_add_ret(1, v)
#define atomic64_inc_return(v) atomic64_add_ret(1, v)

#define atomic_sub_return(i, v) atomic_sub_ret(i, v)
#define atomic64_sub_return(i, v) atomic64_sub_ret(i, v)

#define atomic_add_return(i, v) atomic_add_ret(i, v)
#define atomic64_add_return(i, v) atomic64_add_ret(i, v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

#define atomic_sub_and_test(i, v) (atomic_sub_ret(i, v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_ret(i, v) == 0)

#define atomic_dec_and_test(v) (atomic_sub_ret(1, v) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_ret(1, v) == 0)
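
/*
 * Classic refcounting idiom built on dec_and_test (hypothetical
 * caller, not part of this header):
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		kfree(obj);
 */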

#define atomic_inc(v) atomic_add(1, v)
#define atomic64_inc(v) atomic64_add(1, v)

#define atomic_dec(v) atomic_sub(1, v)
#define atomic64_dec(v) atomic64_sub(1, v)

#define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
#define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * Add @a to @v, unless @v already holds @u; return the value observed
 * before any add took place.
 */
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		/* Another CPU raced us; retry with the value it left. */
		c = old;
	}
	return c;
}
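
/*
 * Note: the generic atomic_add_unless()/atomic_inc_not_zero() wrappers
 * in <linux/atomic.h> are built on this helper.
 */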

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * Add @a to @v, unless @v already holds @u; return non-zero iff the
 * add was carried out.
 */
static inline long atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		/* Another CPU raced us; retry with the value it left. */
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
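
/*
 * Hypothetical example: only take a reference while the count has not
 * already dropped to zero:
 *
 *	if (!atomic64_inc_not_zero(&obj->refcnt))
 *		return NULL;
 */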

/*
 * Atomic operations are already serializing; barrier() is only a
 * compiler barrier, the hardware ordering comes from the operations
 * themselves.
 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* !(__ARCH_SPARC64_ATOMIC__) */