/*
 *  arch/arm/include/asm/atomic.h
 *
 *  Copyright (C) 1996 Russell King.
 *  Copyright (C) 2002 Deep Blue Solutions Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

#include <linux/compiler.h>
#include <linux/prefetch.h>
#include <linux/types.h>
#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__

/*
 * On ARM, ordinary assignment (str instruction) doesn't clear the local
 * strex/ldrex monitor on some implementations. The reason we can use it for
 * atomic_set() is the clrex or dummy strex done on every exception return.
 */
#define atomic_read(v)	READ_ONCE((v)->counter)
#define atomic_set(v,i)	WRITE_ONCE(((v)->counter), (i))
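
/*
 * Example (illustrative sketch only): both accessors are single plain
 * accesses, so basic usage involves no loops or barriers, e.g.
 *
 *	atomic_t seq = ATOMIC_INIT(0);
 *	int cur;
 *
 *	atomic_set(&seq, 5);		@ compiles to a plain str
 *	cur = atomic_read(&seq);	@ compiles to a plain ldr
 */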

#if __LINUX_ARM_ARCH__ >= 6

/*
 * ARMv6 UP and SMP safe atomic ops.  We use load exclusive and
 * store exclusive to ensure that these are atomic.  We may loop
 * to ensure that the update happens.
 */

#define ATOMIC_OP(op, c_op, asm_op)					\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp;						\
	int result;							\
									\
	prefetchw(&v->counter);						\
	__asm__ __volatile__("@ atomic_" #op "\n"			\
"1:	ldrex	%0, [%3]\n"						\
"	" #asm_op "	%0, %0, %4\n"					\
"	strex	%1, %0, [%3]\n"						\
"	teq	%1, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "Ir" (i)					\
	: "cc");							\
}

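/*
 * In the ldrex/strex sequence above, %0 (result) receives the loaded
 * and then updated value, %1 (tmp) receives the strex status flag
 * (0 on success, 1 if the exclusive monitor was lost), %3 is
 * &v->counter and %4 is the operand i.  The teq/bne pair retries the
 * whole read-modify-write until the store exclusive succeeds.
 */
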
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static inline int atomic_##op##_return_relaxed(int i, atomic_t *v)	\
{									\
	unsigned long tmp;						\
	int result;							\
									\
	prefetchw(&v->counter);						\
									\
	__asm__ __volatile__("@ atomic_" #op "_return\n"		\
"1:	ldrex	%0, [%3]\n"						\
"	" #asm_op "	%0, %0, %4\n"					\
"	strex	%1, %0, [%3]\n"						\
"	teq	%1, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "Ir" (i)					\
	: "cc");							\
									\
	return result;							\
}

#define ATOMIC_FETCH_OP(op, c_op, asm_op)				\
static inline int atomic_fetch_##op##_relaxed(int i, atomic_t *v)	\
{									\
	unsigned long tmp;						\
	int result, val;						\
									\
	prefetchw(&v->counter);						\
									\
	__asm__ __volatile__("@ atomic_fetch_" #op "\n"			\
"1:	ldrex	%0, [%4]\n"						\
"	" #asm_op "	%1, %0, %5\n"					\
"	strex	%2, %1, [%4]\n"						\
"	teq	%2, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter)	\
	: "r" (&v->counter), "Ir" (i)					\
	: "cc");							\
									\
	return result;							\
}
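
/*
 * The three shapes generated above differ only in what they hand back:
 * ATOMIC_OP() returns void, ATOMIC_OP_RETURN() returns the new value
 * (the op is applied to %0 in place), and ATOMIC_FETCH_OP() computes
 * the new value into a separate register (%1) so it can return the
 * value observed before the operation.
 */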

#define atomic_add_return_relaxed	atomic_add_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub_relaxed

#define atomic_fetch_and_relaxed	atomic_fetch_and_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor_relaxed

static inline int atomic_cmpxchg_relaxed(atomic_t *ptr, int old, int new)
{
	int oldval;
	unsigned long res;

	prefetchw(&ptr->counter);

	do {
		__asm__ __volatile__("@ atomic_cmpxchg\n"
		"ldrex	%1, [%3]\n"
		"mov	%0, #0\n"
		"teq	%1, %4\n"
		"strexeq %0, %5, [%3]\n"
		    : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter)
		    : "r" (&ptr->counter), "Ir" (old), "r" (new)
		    : "cc");
	} while (res);

	return oldval;
}
#define atomic_cmpxchg_relaxed		atomic_cmpxchg_relaxed
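
/*
 * A minimal sketch of the usual compare-and-swap retry loop built on
 * this primitive (illustrative only: example_or() is hypothetical, and
 * the fully ordered atomic_cmpxchg() is generated from the _relaxed
 * version by <linux/atomic.h>):
 *
 *	static inline int example_or(int mask, atomic_t *v)
 *	{
 *		int old, seen;
 *
 *		old = atomic_read(v);
 *		while ((seen = atomic_cmpxchg(v, old, old | mask)) != old)
 *			old = seen;
 *		return old;
 *	}
 */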

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int oldval, newval;
	unsigned long tmp;

	smp_mb();
	prefetchw(&v->counter);

	__asm__ __volatile__ ("@ atomic_add_unless\n"
"1:	ldrex	%0, [%4]\n"
"	teq	%0, %5\n"
"	beq	2f\n"
"	add	%1, %0, %6\n"
"	strex	%2, %1, [%4]\n"
"	teq	%2, #0\n"
"	bne	1b\n"
"2:"
	: "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter), "r" (u), "r" (a)
	: "cc");

	if (oldval != u)
		smp_mb();

	return oldval;
}
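
/*
 * Note the barrier placement: the trailing smp_mb() is only issued
 * when the add actually happened (oldval != u), so a failed attempt
 * stays cheap.  A minimal sketch of the generic wrapper that consumes
 * this helper (it lives in <linux/atomic.h>, shown here only for
 * orientation):
 *
 *	static inline int atomic_add_unless(atomic_t *v, int a, int u)
 *	{
 *		return __atomic_add_unless(v, a, u) != u;
 *	}
 */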

#else /* ARM_ARCH_6 */

#ifdef CONFIG_SMP
#error SMP not supported on pre-ARMv6 CPUs
#endif

#define ATOMIC_OP(op, c_op, asm_op)					\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int val;							\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	val = v->counter;						\
	raw_local_irq_restore(flags);					\
									\
	return val;							\
}
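
/*
 * Pre-ARMv6 builds are necessarily uniprocessor (see the #error above),
 * so masking interrupts around the plain C read-modify-write is enough:
 * the only possible interleaving on UP comes from interrupt handlers,
 * which raw_local_irq_save() excludes.
 */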

#define ATOMIC_FETCH_OP(op, c_op, asm_op)				\
static inline int atomic_fetch_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int val;							\
									\
	raw_local_irq_save(flags);					\
	val = v->counter;						\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
									\
	return val;							\
}

#define atomic_fetch_or atomic_fetch_or

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	raw_local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	raw_local_irq_restore(flags);

	return ret;
}

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)
		c = old;
	return c;
}

#endif /* __LINUX_ARM_ARCH__ */

#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_OP_RETURN(op, c_op, asm_op)				\
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(add, +=, add)
ATOMIC_OPS(sub, -=, sub)
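
/*
 * The instantiations above provide atomic_add()/atomic_sub() plus, on
 * ARMv6 or later, atomic_{add,sub}_return_relaxed() and
 * atomic_fetch_{add,sub}_relaxed(); on earlier CPUs the fully ordered
 * atomic_{add,sub}_return() and atomic_fetch_{add,sub}() variants are
 * generated directly.
 */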

#define atomic_andnot atomic_andnot

#undef ATOMIC_OPS
#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_FETCH_OP(op, c_op, asm_op)

ATOMIC_OPS(and, &=, and)
ATOMIC_OPS(andnot, &= ~, bic)
ATOMIC_OPS(or,  |=, orr)
ATOMIC_OPS(xor, ^=, eor)
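
/*
 * Note that ATOMIC_OPS was redefined without ATOMIC_OP_RETURN first:
 * the kernel's atomic API has no atomic_{and,or,xor,andnot}_return(),
 * only the void and fetch forms generated here.
 */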

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic_inc(v)		atomic_add(1, v)
#define atomic_dec(v)		atomic_sub(1, v)

#define atomic_inc_and_test(v)	(atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v)	(atomic_sub_return(1, v) == 0)
#define atomic_inc_return_relaxed(v)    (atomic_add_return_relaxed(1, v))
#define atomic_dec_return_relaxed(v)    (atomic_sub_return_relaxed(1, v))
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)

#ifndef CONFIG_GENERIC_ATOMIC64
typedef struct {
	long long counter;
} atomic64_t;

#define ATOMIC64_INIT(i) { (i) }

#ifdef CONFIG_ARM_LPAE
static inline long long atomic64_read(const atomic64_t *v)
{
	long long result;

	__asm__ __volatile__("@ atomic64_read\n"
"	ldrd	%0, %H0, [%1]"
	: "=&r" (result)
	: "r" (&v->counter), "Qo" (v->counter)
	);

	return result;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	__asm__ __volatile__("@ atomic64_set\n"
"	strd	%2, %H2, [%1]"
	: "=Qo" (v->counter)
	: "r" (&v->counter), "r" (i)
	);
}
#else
static inline long long atomic64_read(const atomic64_t *v)
{
	long long result;

	__asm__ __volatile__("@ atomic64_read\n"
"	ldrexd	%0, %H0, [%1]"
	: "=&r" (result)
	: "r" (&v->counter), "Qo" (v->counter)
	);

	return result;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	long long tmp;

	prefetchw(&v->counter);
	__asm__ __volatile__("@ atomic64_set\n"
"1:	ldrexd	%0, %H0, [%2]\n"
"	strexd	%0, %3, %H3, [%2]\n"
"	teq	%0, #0\n"
"	bne	1b"
	: "=&r" (tmp), "=Qo" (v->counter)
	: "r" (&v->counter), "r" (i)
	: "cc");
}
#endif
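
/*
 * With LPAE the architecture guarantees that ldrd/strd to a naturally
 * aligned 64-bit location are single-copy atomic, so plain loads and
 * stores suffice.  Without LPAE only the ldrexd/strexd pair provides a
 * 64-bit single-copy atomic store, which is why even atomic64_set()
 * above must loop on a load/store exclusive sequence.
 */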

#define ATOMIC64_OP(op, op1, op2)					\
static inline void atomic64_##op(long long i, atomic64_t *v)		\
{									\
	long long result;						\
	unsigned long tmp;						\
									\
	prefetchw(&v->counter);						\
	__asm__ __volatile__("@ atomic64_" #op "\n"			\
"1:	ldrexd	%0, %H0, [%3]\n"					\
"	" #op1 " %Q0, %Q0, %Q4\n"					\
"	" #op2 " %R0, %R0, %R4\n"					\
"	strexd	%1, %0, %H0, [%3]\n"					\
"	teq	%1, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "r" (i)					\
	: "cc");							\
}

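/*
 * Operand modifiers used throughout the 64-bit ops: %Q0 selects the
 * low 32-bit half of a 64-bit operand, %R0 the high half, and %H0
 * names the second register of the pair, as ldrexd/strexd require
 * consecutive registers.
 */
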
#define ATOMIC64_OP_RETURN(op, op1, op2)				\
static inline long long							\
atomic64_##op##_return_relaxed(long long i, atomic64_t *v)		\
{									\
	long long result;						\
	unsigned long tmp;						\
									\
	prefetchw(&v->counter);						\
									\
	__asm__ __volatile__("@ atomic64_" #op "_return\n"		\
"1:	ldrexd	%0, %H0, [%3]\n"					\
"	" #op1 " %Q0, %Q0, %Q4\n"					\
"	" #op2 " %R0, %R0, %R4\n"					\
"	strexd	%1, %0, %H0, [%3]\n"					\
"	teq	%1, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "r" (i)					\
	: "cc");							\
									\
	return result;							\
}

#define ATOMIC64_FETCH_OP(op, op1, op2)					\
static inline long long							\
atomic64_fetch_##op##_relaxed(long long i, atomic64_t *v)		\
{									\
	long long result, val;						\
	unsigned long tmp;						\
									\
	prefetchw(&v->counter);						\
									\
	__asm__ __volatile__("@ atomic64_fetch_" #op "\n"		\
"1:	ldrexd	%0, %H0, [%4]\n"					\
"	" #op1 " %Q1, %Q0, %Q5\n"					\
"	" #op2 " %R1, %R0, %R5\n"					\
"	strexd	%2, %1, %H1, [%4]\n"					\
"	teq	%2, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (val), "=&r" (tmp), "+Qo" (v->counter)	\
	: "r" (&v->counter), "r" (i)					\
	: "cc");							\
									\
	return result;							\
}

#define ATOMIC64_OPS(op, op1, op2)					\
	ATOMIC64_OP(op, op1, op2)					\
	ATOMIC64_OP_RETURN(op, op1, op2)				\
	ATOMIC64_FETCH_OP(op, op1, op2)

ATOMIC64_OPS(add, adds, adc)
ATOMIC64_OPS(sub, subs, sbc)

#define atomic64_add_return_relaxed	atomic64_add_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub_relaxed

#undef ATOMIC64_OPS
#define ATOMIC64_OPS(op, op1, op2)					\
	ATOMIC64_OP(op, op1, op2)					\
	ATOMIC64_FETCH_OP(op, op1, op2)

#define atomic64_andnot atomic64_andnot

ATOMIC64_OPS(and, and, and)
ATOMIC64_OPS(andnot, bic, bic)
ATOMIC64_OPS(or,  orr, orr)
ATOMIC64_OPS(xor, eor, eor)

#define atomic64_fetch_and_relaxed	atomic64_fetch_and_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor_relaxed

#undef ATOMIC64_OPS
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

static inline long long
atomic64_cmpxchg_relaxed(atomic64_t *ptr, long long old, long long new)
{
	long long oldval;
	unsigned long res;

	prefetchw(&ptr->counter);

	do {
		__asm__ __volatile__("@ atomic64_cmpxchg\n"
		"ldrexd		%1, %H1, [%3]\n"
		"mov		%0, #0\n"
		"teq		%1, %4\n"
		"teqeq		%H1, %H4\n"
		"strexdeq	%0, %5, %H5, [%3]"
		: "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter)
		: "r" (&ptr->counter), "r" (old), "r" (new)
		: "cc");
	} while (res);

	return oldval;
}
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg_relaxed
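
/*
 * The 64-bit comparison above is done 32 bits at a time: teq checks
 * the low words and teqeq the high words, so strexdeq only attempts
 * the store when the full 64-bit values are equal.
 */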

static inline long long atomic64_xchg_relaxed(atomic64_t *ptr, long long new)
{
	long long result;
	unsigned long tmp;

	prefetchw(&ptr->counter);

	__asm__ __volatile__("@ atomic64_xchg\n"
"1:	ldrexd	%0, %H0, [%3]\n"
"	strexd	%1, %4, %H4, [%3]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (result), "=&r" (tmp), "+Qo" (ptr->counter)
	: "r" (&ptr->counter), "r" (new)
	: "cc");

	return result;
}
#define atomic64_xchg_relaxed		atomic64_xchg_relaxed

static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long result;
	unsigned long tmp;

	smp_mb();
	prefetchw(&v->counter);

	__asm__ __volatile__("@ atomic64_dec_if_positive\n"
"1:	ldrexd	%0, %H0, [%3]\n"
"	subs	%Q0, %Q0, #1\n"
"	sbc	%R0, %R0, #0\n"
"	teq	%R0, #0\n"
"	bmi	2f\n"
"	strexd	%1, %0, %H0, [%3]\n"
"	teq	%1, #0\n"
"	bne	1b\n"
"2:"
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter)
	: "cc");

	smp_mb();

	return result;
}
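
/*
 * atomic64_dec_if_positive() returns the decremented value.  If the
 * decrement would make the counter negative, the bmi branch skips the
 * store, the counter is left untouched and the negative result tells
 * the caller that nothing happened.
 */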

static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	long long val;
	unsigned long tmp;
	int ret = 1;

	smp_mb();
	prefetchw(&v->counter);

	__asm__ __volatile__("@ atomic64_add_unless\n"
"1:	ldrexd	%0, %H0, [%4]\n"
"	teq	%0, %5\n"
"	teqeq	%H0, %H5\n"
"	moveq	%1, #0\n"
"	beq	2f\n"
"	adds	%Q0, %Q0, %Q6\n"
"	adc	%R0, %R0, %R6\n"
"	strexd	%2, %0, %H0, [%4]\n"
"	teq	%2, #0\n"
"	bne	1b\n"
"2:"
	: "=&r" (val), "+r" (ret), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter), "r" (u), "r" (a)
	: "cc");

	if (ret)
		smp_mb();

	return ret;
}
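
/*
 * Here ret starts at 1 and is cleared by moveq when the counter
 * already equals u, so the function returns non-zero only when the
 * add was performed; the trailing smp_mb() is likewise skipped on
 * failure, mirroring the 32-bit __atomic_add_unless() above.
 */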

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return_relaxed(v)	atomic64_add_return_relaxed(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return_relaxed(v)	atomic64_sub_return_relaxed(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)

#endif /* !CONFIG_GENERIC_ATOMIC64 */
#endif /* __KERNEL__ */
#endif /* __ASM_ARM_ATOMIC_H */