/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic C implementation of atomic counter operations. Do not include in
 * machine-independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */
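
/*
 * arch_cmpxchg(ptr, old, new) atomically replaces *ptr with new iff
 * *ptr still holds old, and returns the value it observed either way;
 * each SMP operation below is a retry loop around that primitive.
 */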

#define ATOMIC_OP(op, c_op)						\
static inline void generic_atomic_##op(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c)	\
		c = old;						\
}
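
/*
 * For example, ATOMIC_OP(add, +) (instantiated below) expands to:
 *
 *	static inline void generic_atomic_add(int i, atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = v->counter;
 *		while ((old = arch_cmpxchg(&v->counter, c, c + i)) != c)
 *			c = old;
 *	}
 *
 * If another CPU modifies the counter between the read and the cmpxchg,
 * the cmpxchg fails and returns the fresh value, and the loop retries
 * with that value as the new expected one.
 */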

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int generic_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c)	\
		c = old;						\
									\
	return c c_op i;						\
}
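
/*
 * The _return variants hand back the new value: once the cmpxchg loop
 * succeeds, c holds the value that was atomically replaced, so c c_op i
 * is exactly what was stored in the counter.
 */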

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int generic_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = arch_cmpxchg(&v->counter, c, c c_op i)) != c)	\
		c = old;						\
									\
	return c;							\
}
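
/*
 * The fetch_ variants return the pre-operation value instead: after the
 * loop terminates, c is the old counter value that the winning cmpxchg
 * observed and replaced.
 */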

#else

#include <linux/irqflags.h>
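
/*
 * On UP a plain read-modify-write suffices once local interrupts are
 * masked: with a single CPU, nothing else can touch v->counter inside
 * the raw_local_irq_save()/raw_local_irq_restore() window.
 */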

#define ATOMIC_OP(op, c_op)						\
static inline void generic_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int generic_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int generic_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = v->counter;						\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#endif /* CONFIG_SMP */

ATOMIC_OP_RETURN(add, +)
ATOMIC_OP_RETURN(sub, -)

ATOMIC_FETCH_OP(add, +)
ATOMIC_FETCH_OP(sub, -)
ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

ATOMIC_OP(add, +)
ATOMIC_OP(sub, -)
ATOMIC_OP(and, &)
ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define arch_atomic_add_return			generic_atomic_add_return
#define arch_atomic_sub_return			generic_atomic_sub_return

#define arch_atomic_fetch_add			generic_atomic_fetch_add
#define arch_atomic_fetch_sub			generic_atomic_fetch_sub
#define arch_atomic_fetch_and			generic_atomic_fetch_and
#define arch_atomic_fetch_or			generic_atomic_fetch_or
#define arch_atomic_fetch_xor			generic_atomic_fetch_xor

#define arch_atomic_add				generic_atomic_add
#define arch_atomic_sub				generic_atomic_sub
#define arch_atomic_and				generic_atomic_and
#define arch_atomic_or				generic_atomic_or
#define arch_atomic_xor				generic_atomic_xor

#define arch_atomic_read(v)			READ_ONCE((v)->counter)
#define arch_atomic_set(v, i)			WRITE_ONCE(((v)->counter), (i))
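
/*
 * Plain loads and stores need no cmpxchg: READ_ONCE()/WRITE_ONCE()
 * prevent compiler tearing, and a naturally aligned int access is
 * assumed to be single-copy atomic on architectures using this header.
 */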

#define arch_atomic_xchg(ptr, v)		(arch_xchg(&(ptr)->counter, (u32)(v)))
#define arch_atomic_cmpxchg(v, old, new)	(arch_cmpxchg(&((v)->counter), (u32)(old), (u32)(new)))
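
/*
 * A minimal usage sketch (the flag below is hypothetical, not part of
 * this header), assuming the generic atomic_t wrappers are layered on
 * top of these arch_ definitions:
 *
 *	static atomic_t taken = ATOMIC_INIT(0);
 *
 *	if (arch_atomic_cmpxchg(&taken, 0, 1) == 0)
 *		...	only one caller observes the 0 -> 1 transition
 */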

#endif /* __ASM_GENERIC_ATOMIC_H */