/*
 *  arch/arm/include/asm/atomic.h
 *
 *  Copyright (C) 1996 Russell King.
 *  Copyright (C) 2002 Deep Blue Solutions Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

#include <linux/compiler.h>
#include <linux/prefetch.h>
#include <linux/types.h>
#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/cmpxchg.h>

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__

/*
 * On ARM, ordinary assignment (str instruction) doesn't clear the local
 * strex/ldrex monitor on some implementations. The reason we can use it for
 * atomic_set() is the clrex or dummy strex done on every exception return.
 */
#define atomic_read(v)	ACCESS_ONCE((v)->counter)
#define atomic_set(v,i)	(((v)->counter) = (i))

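/*
 * Illustrative sketch (not part of this header; the identifier below is
 * hypothetical): atomic_read() and atomic_set() are plain, unordered
 * accesses. They guarantee atomicity of the access itself, not any
 * memory ordering against other accesses:
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 0);		// plain store
 *	if (atomic_read(&nr_users) == 0)	// plain load via ACCESS_ONCE()
 *		do_something();
 */
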
#if __LINUX_ARM_ARCH__ >= 6

/*
 * ARMv6 UP and SMP safe atomic ops.  We use load exclusive and
 * store exclusive to ensure that these are atomic.  We may loop
 * to ensure that the update happens.
 */

#define ATOMIC_OP(op, c_op, asm_op)					\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long tmp;						\
	int result;							\
									\
	prefetchw(&v->counter);						\
	__asm__ __volatile__("@ atomic_" #op "\n"			\
"1:	ldrex	%0, [%3]\n"						\
"	" #asm_op "	%0, %0, %4\n"					\
"	strex	%1, %0, [%3]\n"						\
"	teq	%1, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "Ir" (i)					\
	: "cc");							\
}									\

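/*
 * Note: ATOMIC_OP() deliberately emits no barriers, so the non-returning
 * atomic_add()/atomic_sub() etc. are unordered.  ATOMIC_OP_RETURN() below
 * brackets the ldrex/strex loop with smp_mb(), making the *_return
 * variants fully ordered.
 */
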
#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long tmp;						\
	int result;							\
									\
	smp_mb();							\
	prefetchw(&v->counter);						\
									\
	__asm__ __volatile__("@ atomic_" #op "_return\n"		\
"1:	ldrex	%0, [%3]\n"						\
"	" #asm_op "	%0, %0, %4\n"					\
"	strex	%1, %0, [%3]\n"						\
"	teq	%1, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "Ir" (i)					\
	: "cc");							\
									\
	smp_mb();							\
									\
	return result;							\
}

static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
{
	int oldval;
	unsigned long res;

	smp_mb();
	prefetchw(&ptr->counter);

	do {
		__asm__ __volatile__("@ atomic_cmpxchg\n"
		"ldrex	%1, [%3]\n"
		"mov	%0, #0\n"
		"teq	%1, %4\n"
		"strexeq %0, %5, [%3]\n"
		    : "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter)
		    : "r" (&ptr->counter), "Ir" (old), "r" (new)
		    : "cc");
	} while (res);

	smp_mb();

	return oldval;
}

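/*
 * Illustrative sketch (hypothetical helper, not part of this header):
 * the typical compare-and-swap retry loop built on atomic_cmpxchg(),
 * here doubling the counter only while it is non-zero:
 *
 *	static inline int atomic_double_nonzero(atomic_t *v)
 *	{
 *		int old, cur = atomic_read(v);
 *
 *		while (cur != 0 &&
 *		       (old = atomic_cmpxchg(v, cur, cur * 2)) != cur)
 *			cur = old;	// lost the race; retry with new value
 *		return cur;		// value observed before the update
 *	}
 */
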
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int oldval, newval;
	unsigned long tmp;

	smp_mb();
	prefetchw(&v->counter);

	__asm__ __volatile__ ("@ atomic_add_unless\n"
"1:	ldrex	%0, [%4]\n"
"	teq	%0, %5\n"
"	beq	2f\n"
"	add	%1, %0, %6\n"
"	strex	%2, %1, [%4]\n"
"	teq	%2, #0\n"
"	bne	1b\n"
"2:"
	: "=&r" (oldval), "=&r" (newval), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter), "r" (u), "r" (a)
	: "cc");

	if (oldval != u)
		smp_mb();

	return oldval;
}

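/*
 * __atomic_add_unless() is the arch hook behind the generic
 * atomic_add_unless()/atomic_inc_not_zero() wrappers in
 * <linux/atomic.h>.  A common caller pattern (sketch; 'obj' and its
 * refcnt field are hypothetical):
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;	// object already dying, don't take a ref
 */
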
#else /* ARM_ARCH_6 */

#ifdef CONFIG_SMP
#error SMP not supported on pre-ARMv6 CPUs
#endif

#define ATOMIC_OP(op, c_op, asm_op)					\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	raw_local_irq_restore(flags);					\
}									\

#define ATOMIC_OP_RETURN(op, c_op, asm_op)				\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int val;							\
									\
	raw_local_irq_save(flags);					\
	v->counter c_op i;						\
	val = v->counter;						\
	raw_local_irq_restore(flags);					\
									\
	return val;							\
}

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	raw_local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	raw_local_irq_restore(flags);

	return ret;
}

static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)
		c = old;
	return c;
}

#endif /* __LINUX_ARM_ARCH__ */

#define ATOMIC_OPS(op, c_op, asm_op)					\
	ATOMIC_OP(op, c_op, asm_op)					\
	ATOMIC_OP_RETURN(op, c_op, asm_op)

ATOMIC_OPS(add, +=, add)
ATOMIC_OPS(sub, -=, sub)

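/*
 * For reference, ATOMIC_OPS(add, +=, add) above expands (roughly) to:
 *
 *	static inline void atomic_add(int i, atomic_t *v);
 *	static inline int atomic_add_return(int i, atomic_t *v);
 *
 * and likewise for sub, with the bodies supplied by whichever
 * ATOMIC_OP()/ATOMIC_OP_RETURN() pair matched __LINUX_ARM_ARCH__.
 */
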
#define CONFIG_ARCH_HAS_ATOMIC_OR
#define atomic_andnot atomic_andnot

ATOMIC_OP(and, &=, and)
ATOMIC_OP(andnot, &= ~, bic)
ATOMIC_OP(or,  |=, orr)
ATOMIC_OP(xor, ^=, eor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic_inc(v)		atomic_add(1, v)
#define atomic_dec(v)		atomic_sub(1, v)

#define atomic_inc_and_test(v)	(atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v)	(atomic_sub_return(1, v) == 0)
#define atomic_inc_return(v)    (atomic_add_return(1, v))
#define atomic_dec_return(v)    (atomic_sub_return(1, v))
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)

#ifndef CONFIG_GENERIC_ATOMIC64
typedef struct {
	long long counter;
} atomic64_t;

#define ATOMIC64_INIT(i) { (i) }

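/*
 * With CONFIG_ARM_LPAE, the single-instruction ldrd/strd accesses below
 * are architecturally single-copy atomic for naturally aligned 64-bit
 * data, so no ldrexd/strexd loop is needed just to read or set the
 * counter.  Without LPAE, atomic64_read()/atomic64_set() must use the
 * exclusive-access forms further down.
 */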
#ifdef CONFIG_ARM_LPAE
static inline long long atomic64_read(const atomic64_t *v)
{
	long long result;

	__asm__ __volatile__("@ atomic64_read\n"
"	ldrd	%0, %H0, [%1]"
	: "=&r" (result)
	: "r" (&v->counter), "Qo" (v->counter)
	);

	return result;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	__asm__ __volatile__("@ atomic64_set\n"
"	strd	%2, %H2, [%1]"
	: "=Qo" (v->counter)
	: "r" (&v->counter), "r" (i)
	);
}
#else
static inline long long atomic64_read(const atomic64_t *v)
{
	long long result;

	__asm__ __volatile__("@ atomic64_read\n"
"	ldrexd	%0, %H0, [%1]"
	: "=&r" (result)
	: "r" (&v->counter), "Qo" (v->counter)
	);

	return result;
}

static inline void atomic64_set(atomic64_t *v, long long i)
{
	long long tmp;

	prefetchw(&v->counter);
	__asm__ __volatile__("@ atomic64_set\n"
"1:	ldrexd	%0, %H0, [%2]\n"
"	strexd	%0, %3, %H3, [%2]\n"
"	teq	%0, #0\n"
"	bne	1b"
	: "=&r" (tmp), "=Qo" (v->counter)
	: "r" (&v->counter), "r" (i)
	: "cc");
}
#endif

#define ATOMIC64_OP(op, op1, op2)					\
static inline void atomic64_##op(long long i, atomic64_t *v)		\
{									\
	long long result;						\
	unsigned long tmp;						\
									\
	prefetchw(&v->counter);						\
	__asm__ __volatile__("@ atomic64_" #op "\n"			\
"1:	ldrexd	%0, %H0, [%3]\n"					\
"	" #op1 " %Q0, %Q0, %Q4\n"					\
"	" #op2 " %R0, %R0, %R4\n"					\
"	strexd	%1, %0, %H0, [%3]\n"					\
"	teq	%1, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "r" (i)					\
	: "cc");							\
}									\

#define ATOMIC64_OP_RETURN(op, op1, op2)				\
static inline long long atomic64_##op##_return(long long i, atomic64_t *v) \
{									\
	long long result;						\
	unsigned long tmp;						\
									\
	smp_mb();							\
	prefetchw(&v->counter);						\
									\
	__asm__ __volatile__("@ atomic64_" #op "_return\n"		\
"1:	ldrexd	%0, %H0, [%3]\n"					\
"	" #op1 " %Q0, %Q0, %Q4\n"					\
"	" #op2 " %R0, %R0, %R4\n"					\
"	strexd	%1, %0, %H0, [%3]\n"					\
"	teq	%1, #0\n"						\
"	bne	1b"							\
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)		\
	: "r" (&v->counter), "r" (i)					\
	: "cc");							\
									\
	smp_mb();							\
									\
	return result;							\
}

#define ATOMIC64_OPS(op, op1, op2)					\
	ATOMIC64_OP(op, op1, op2)					\
	ATOMIC64_OP_RETURN(op, op1, op2)

ATOMIC64_OPS(add, adds, adc)
ATOMIC64_OPS(sub, subs, sbc)

#define atomic64_andnot atomic64_andnot

ATOMIC64_OP(and, and, and)
ATOMIC64_OP(andnot, bic, bic)
ATOMIC64_OP(or,  orr, orr)
ATOMIC64_OP(xor, eor, eor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

static inline long long atomic64_cmpxchg(atomic64_t *ptr, long long old,
					long long new)
{
	long long oldval;
	unsigned long res;

	smp_mb();
	prefetchw(&ptr->counter);

	do {
		__asm__ __volatile__("@ atomic64_cmpxchg\n"
		"ldrexd		%1, %H1, [%3]\n"
		"mov		%0, #0\n"
		"teq		%1, %4\n"
		"teqeq		%H1, %H4\n"
		"strexdeq	%0, %5, %H5, [%3]"
		: "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter)
		: "r" (&ptr->counter), "r" (old), "r" (new)
		: "cc");
	} while (res);

	smp_mb();

	return oldval;
}

static inline long long atomic64_xchg(atomic64_t *ptr, long long new)
{
	long long result;
	unsigned long tmp;

	smp_mb();
	prefetchw(&ptr->counter);

	__asm__ __volatile__("@ atomic64_xchg\n"
"1:	ldrexd	%0, %H0, [%3]\n"
"	strexd	%1, %4, %H4, [%3]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (result), "=&r" (tmp), "+Qo" (ptr->counter)
	: "r" (&ptr->counter), "r" (new)
	: "cc");

	smp_mb();

	return result;
}

static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long result;
	unsigned long tmp;

	smp_mb();
	prefetchw(&v->counter);

	__asm__ __volatile__("@ atomic64_dec_if_positive\n"
"1:	ldrexd	%0, %H0, [%3]\n"
"	subs	%Q0, %Q0, #1\n"
"	sbc	%R0, %R0, #0\n"
"	teq	%R0, #0\n"
"	bmi	2f\n"
"	strexd	%1, %0, %H0, [%3]\n"
"	teq	%1, #0\n"
"	bne	1b\n"
"2:"
	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter)
	: "cc");

	smp_mb();

	return result;
}

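/*
 * Illustrative sketch (hypothetical semaphore-like counter):
 * atomic64_dec_if_positive() returns the decremented value, and goes
 * negative only when no decrement was actually performed:
 *
 *	if (atomic64_dec_if_positive(&sem_count) < 0)
 *		wait_for_resource();	// counter was already 0
 */
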
static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	long long val;
	unsigned long tmp;
	int ret = 1;

	smp_mb();
	prefetchw(&v->counter);

	__asm__ __volatile__("@ atomic64_add_unless\n"
"1:	ldrexd	%0, %H0, [%4]\n"
"	teq	%0, %5\n"
"	teqeq	%H0, %H5\n"
"	moveq	%1, #0\n"
"	beq	2f\n"
"	adds	%Q0, %Q0, %Q6\n"
"	adc	%R0, %R0, %R6\n"
"	strexd	%2, %0, %H0, [%4]\n"
"	teq	%2, #0\n"
"	bne	1b\n"
"2:"
	: "=&r" (val), "+r" (ret), "=&r" (tmp), "+Qo" (v->counter)
	: "r" (&v->counter), "r" (u), "r" (a)
	: "cc");

	if (ret)
		smp_mb();

	return ret;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
#define atomic64_inc(v)			atomic64_add(1LL, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1LL, (v))
#define atomic64_inc_and_test(v)	(atomic64_inc_return(v) == 0)
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec(v)			atomic64_sub(1LL, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1LL, (v))
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1LL, 0LL)

#endif /* !CONFIG_GENERIC_ATOMIC64 */
#endif
#endif