/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Generic C implementation of atomic counter operations. Do not include in
 * machine independent code.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#ifdef CONFIG_ARCH_ATOMIC
#define __ga_cmpxchg	arch_cmpxchg
#define __ga_xchg	arch_xchg
#else
#define __ga_cmpxchg	cmpxchg
#define __ga_xchg	xchg
#endif
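
/*
 * The helpers below are written against the __ga_* aliases so a single
 * implementation serves both naming conventions: architectures that
 * select CONFIG_ARCH_ATOMIC provide arch_cmpxchg()/arch_xchg(), everyone
 * else provides plain cmpxchg()/xchg().
 */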

#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

#define ATOMIC_OP(op, c_op)						\
static inline void generic_atomic_##op(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = __ga_cmpxchg(&v->counter, c, c c_op i)) != c)	\
		c = old;						\
}
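
/*
 * For illustration, ATOMIC_OP(add, +) expands (modulo whitespace) to:
 *
 *	static inline void generic_atomic_add(int i, atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = v->counter;
 *		while ((old = __ga_cmpxchg(&v->counter, c, c + i)) != c)
 *			c = old;
 *	}
 *
 * i.e. a compare-and-swap retry loop: if another CPU changed the counter
 * between the read and the cmpxchg, take the value the cmpxchg returned
 * and try again.
 */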

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int generic_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = __ga_cmpxchg(&v->counter, c, c c_op i)) != c)	\
		c = old;						\
									\
	return c c_op i;						\
}
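
/*
 * Note that when the loop above exits, c holds the value observed by the
 * successful cmpxchg, so "c c_op i" recomputes exactly the new value that
 * was installed.
 */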

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int generic_atomic_fetch_##op(int i, atomic_t *v)	\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = __ga_cmpxchg(&v->counter, c, c c_op i)) != c)	\
		c = old;						\
									\
	return c;							\
}

#else

#include <linux/irqflags.h>
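
/*
 * On UP there is no other CPU to race with; briefly disabling interrupts
 * around the read-modify-write makes each operation atomic with respect
 * to interrupt handlers on the local CPU.
 */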

#define ATOMIC_OP(op, c_op)						\
static inline void generic_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int generic_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int generic_atomic_fetch_##op(int i, atomic_t *v)	\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = v->counter;						\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#endif /* CONFIG_SMP */

ATOMIC_OP_RETURN(add, +)
ATOMIC_OP_RETURN(sub, -)

ATOMIC_FETCH_OP(add, +)
ATOMIC_FETCH_OP(sub, -)
ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

ATOMIC_OP(add, +)
ATOMIC_OP(sub, -)
ATOMIC_OP(and, &)
ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)
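
/*
 * The instantiations above generate, for example:
 *
 *	generic_atomic_add_return(i, v)	- add i to v, return the new value
 *	generic_atomic_fetch_add(i, v)	- add i to v, return the old value
 *	generic_atomic_add(i, v)	- add i to v, return nothing
 *
 * and likewise for sub, and for the and/or/xor bitwise ops (which only
 * come in the fetch_* and void flavours here).
 */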

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#undef __ga_cmpxchg
#undef __ga_xchg

#ifdef CONFIG_ARCH_ATOMIC
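
/*
 * With CONFIG_ARCH_ATOMIC the architecture is expected to supply the
 * arch_atomic_*() namespace, on top of which the generic instrumented
 * wrappers define the atomic_*() API proper; here we simply fill that
 * namespace in with the generic helpers.
 */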

#define arch_atomic_add_return			generic_atomic_add_return
#define arch_atomic_sub_return			generic_atomic_sub_return

#define arch_atomic_fetch_add			generic_atomic_fetch_add
#define arch_atomic_fetch_sub			generic_atomic_fetch_sub
#define arch_atomic_fetch_and			generic_atomic_fetch_and
#define arch_atomic_fetch_or			generic_atomic_fetch_or
#define arch_atomic_fetch_xor			generic_atomic_fetch_xor

#define arch_atomic_add				generic_atomic_add
#define arch_atomic_sub				generic_atomic_sub
#define arch_atomic_and				generic_atomic_and
#define arch_atomic_or				generic_atomic_or
#define arch_atomic_xor				generic_atomic_xor

#define arch_atomic_read(v)			READ_ONCE((v)->counter)
#define arch_atomic_set(v, i)			WRITE_ONCE(((v)->counter), (i))
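
/*
 * READ_ONCE()/WRITE_ONCE() stop the compiler from tearing, fusing or
 * re-reading the access, which is all that atomic_read()/atomic_set()
 * guarantee; no memory ordering is implied.
 */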

#define arch_atomic_xchg(ptr, v)		(arch_xchg(&(ptr)->counter, (v)))
#define arch_atomic_cmpxchg(v, old, new)	(arch_cmpxchg(&((v)->counter), (old), (new)))

#else /* CONFIG_ARCH_ATOMIC */

#define atomic_add_return		generic_atomic_add_return
#define atomic_sub_return		generic_atomic_sub_return

#define atomic_fetch_add		generic_atomic_fetch_add
#define atomic_fetch_sub		generic_atomic_fetch_sub
#define atomic_fetch_and		generic_atomic_fetch_and
#define atomic_fetch_or			generic_atomic_fetch_or
#define atomic_fetch_xor		generic_atomic_fetch_xor

#define atomic_add			generic_atomic_add
#define atomic_sub			generic_atomic_sub
#define atomic_and			generic_atomic_and
#define atomic_or			generic_atomic_or
#define atomic_xor			generic_atomic_xor

#define atomic_read(v)			READ_ONCE((v)->counter)
#define atomic_set(v, i)		WRITE_ONCE(((v)->counter), (i))

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
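
/*
 * As an illustrative sketch (not part of this header's API), callers can
 * build conditional updates out of atomic_cmpxchg(), e.g. an
 * increment-unless-zero:
 *
 *	int c, old;
 *
 *	c = atomic_read(v);
 *	while (c && (old = atomic_cmpxchg(v, c, c + 1)) != c)
 *		c = old;
 *
 *	return c != 0;		(true if we incremented)
 */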

#endif /* CONFIG_ARCH_ATOMIC */

#endif /* __ASM_GENERIC_ATOMIC_H */