xref: /openbmc/linux/arch/sh/include/asm/atomic.h (revision b627b4ed)
1 #ifndef __ASM_SH_ATOMIC_H
2 #define __ASM_SH_ATOMIC_H
3 
4 /*
5  * Atomic operations that C can't guarantee us.  Useful for
6  * resource counting etc..
7  *
8  */
9 
10 #include <linux/compiler.h>
11 #include <linux/types.h>
12 #include <asm/system.h>
13 
#define ATOMIC_INIT(i)	( (atomic_t) { (i) } )

/*
 * atomic_read() must go through a volatile access so the compiler
 * cannot cache the counter in a register or elide repeated reads
 * (e.g. in a busy-wait loop); matches the generic/other-arch
 * atomic_read() implementations.
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic_set(v,i)		((v)->counter = (i))
18 
19 #if defined(CONFIG_GUSA_RB)
20 #include <asm/atomic-grb.h>
21 #elif defined(CONFIG_CPU_SH4A)
22 #include <asm/atomic-llsc.h>
23 #else
24 #include <asm/atomic-irq.h>
25 #endif
26 
/* True if adding @a to @v yields a negative result. */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

/* Decrement/increment @v by one and return the new value. */
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))
31 
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/* True when subtracting @i from (resp. decrementing) @v leaves it at zero. */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/* Increment/decrement @v by one, discarding the result. */
#define atomic_inc(v) atomic_add(1,(v))
#define atomic_dec(v) atomic_sub(1,(v))
47 
48 #ifndef CONFIG_GUSA_RB
49 static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
50 {
51 	int ret;
52 	unsigned long flags;
53 
54 	local_irq_save(flags);
55 	ret = v->counter;
56 	if (likely(ret == old))
57 		v->counter = new;
58 	local_irq_restore(flags);
59 
60 	return ret;
61 }
62 
63 static inline int atomic_add_unless(atomic_t *v, int a, int u)
64 {
65 	int ret;
66 	unsigned long flags;
67 
68 	local_irq_save(flags);
69 	ret = v->counter;
70 	if (ret != u)
71 		v->counter += a;
72 	local_irq_restore(flags);
73 
74 	return ret != u;
75 }
76 #endif
77 
/* Swap the counter word via xchg(); increment only when currently non-zero. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/* Atomic operations are already serializing on SH */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
86 
87 #include <asm-generic/atomic.h>
88 #endif /* __ASM_SH_ATOMIC_H */
89