/*
 *  arch/arm/include/asm/atomic.h
 *
 *  Copyright (C) 1996 Russell King.
 *  Copyright (C) 2002 Deep Blue Solutions Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__

/*
 * On ARM, ordinary assignment (str instruction) doesn't clear the local
 * strex/ldrex monitor on some implementations. The reason we can use it for
 * atomic_set() is the clrex or dummy strex done on every exception return.
 */
#define atomic_read(v)	((v)->counter)
#define atomic_set(v,i)	(((v)->counter) = (i))

#if __LINUX_ARM_ARCH__ >= 6

/*
 * ARMv6 UP and SMP safe atomic ops.  We use load exclusive and
 * store exclusive to ensure that these are atomic.  We may loop
 * to ensure that the update happens.
 */
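/*
 * atomic_add - atomically add @i to @v.  There is no return value and,
 * unlike the *_return variants below, no memory barrier is implied.
 *
 * Illustration only (hypothetical helper names, not kernel API): each op
 * below is conceptually
 *
 *	do {
 *		tmp = load_exclusive(&v->counter);
 *	} while (store_exclusive(tmp + i, &v->counter));  /@ retried if the
 *							      exclusive store fails @/
 */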
static inline void atomic_add(int i, atomic_t *v)
{
	unsigned long tmp;
	int result;

	__asm__ __volatile__("@ atomic_add\n"
"1:	ldrex	%0, [%2]\n"
"	add	%0, %0, %3\n"
"	strex	%1, %0, [%2]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (result), "=&r" (tmp)
	: "r" (&v->counter), "Ir" (i)
	: "cc");
}

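/*
 * atomic_add_return - atomically add @i to @v and return the new value.
 * The smp_mb() before and after the exclusive sequence gives this (and
 * atomic_sub_return/atomic_cmpxchg below) full memory barrier semantics.
 */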
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long tmp;
	int result;

	smp_mb();

	__asm__ __volatile__("@ atomic_add_return\n"
"1:	ldrex	%0, [%2]\n"
"	add	%0, %0, %3\n"
"	strex	%1, %0, [%2]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (result), "=&r" (tmp)
	: "r" (&v->counter), "Ir" (i)
	: "cc");

	smp_mb();

	return result;
}

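/*
 * atomic_sub/atomic_sub_return - the subtracting counterparts of the
 * operations above; the same atomicity and barrier rules apply.
 */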
static inline void atomic_sub(int i, atomic_t *v)
{
	unsigned long tmp;
	int result;

	__asm__ __volatile__("@ atomic_sub\n"
"1:	ldrex	%0, [%2]\n"
"	sub	%0, %0, %3\n"
"	strex	%1, %0, [%2]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (result), "=&r" (tmp)
	: "r" (&v->counter), "Ir" (i)
	: "cc");
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long tmp;
	int result;

	smp_mb();

	__asm__ __volatile__("@ atomic_sub_return\n"
"1:	ldrex	%0, [%2]\n"
"	sub	%0, %0, %3\n"
"	strex	%1, %0, [%2]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (result), "=&r" (tmp)
	: "r" (&v->counter), "Ir" (i)
	: "cc");

	smp_mb();

	return result;
}

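/*
 * atomic_cmpxchg - if @ptr holds @old, atomically replace it with @new.
 * Returns the value that was actually read; it equals @old on success.
 * The loop retries only when the exclusive store fails, not when the
 * comparison fails.
 */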
static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
{
	unsigned long oldval, res;

	smp_mb();

	do {
		__asm__ __volatile__("@ atomic_cmpxchg\n"
		"ldrex	%1, [%2]\n"
		"mov	%0, #0\n"
		"teq	%1, %3\n"
		"strexeq %0, %4, [%2]\n"
		    : "=&r" (res), "=&r" (oldval)
		    : "r" (&ptr->counter), "Ir" (old), "r" (new)
		    : "cc");
	} while (res);

	smp_mb();

	return oldval;
}

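/*
 * atomic_clear_mask - atomically clear the bits set in @mask from the
 * word at @addr.
 */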
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long tmp, tmp2;

	__asm__ __volatile__("@ atomic_clear_mask\n"
"1:	ldrex	%0, [%2]\n"
"	bic	%0, %0, %3\n"
"	strex	%1, %0, [%2]\n"
"	teq	%1, #0\n"
"	bne	1b"
	: "=&r" (tmp), "=&r" (tmp2)
	: "r" (addr), "Ir" (mask)
	: "cc");
}

#else /* ARM_ARCH_6 */

#ifdef CONFIG_SMP
#error SMP not supported on pre-ARMv6 CPUs
#endif

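/*
 * Pre-ARMv6 cores lack ldrex/strex, so atomicity is provided by disabling
 * interrupts around a plain read-modify-write.  That is only safe on
 * uniprocessor systems, hence the #error above for SMP.
 */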
static inline int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int val;

	raw_local_irq_save(flags);
	val = v->counter;
	v->counter = val += i;
	raw_local_irq_restore(flags);

	return val;
}
#define atomic_add(i, v)	(void) atomic_add_return(i, v)

static inline int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int val;

	raw_local_irq_save(flags);
	val = v->counter;
	v->counter = val -= i;
	raw_local_irq_restore(flags);

	return val;
}
#define atomic_sub(i, v)	(void) atomic_sub_return(i, v)

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	raw_local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	raw_local_irq_restore(flags);

	return ret;
}

static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;

	raw_local_irq_save(flags);
	*addr &= ~mask;
	raw_local_irq_restore(flags);
}

#endif /* __LINUX_ARM_ARCH__ */

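/*
 * atomic_xchg - atomically replace the counter with @new and return the
 * previous value, via the architecture's xchg() helper.
 */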
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

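/*
 * atomic_add_unless - add @a to @v unless @v is already @u.
 * Returns non-zero if the add was performed (i.e. @v was not @u).
 */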
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)
		c = old;
	return c != u;
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_inc(v)		atomic_add(1, v)
#define atomic_dec(v)		atomic_sub(1, v)

#define atomic_inc_and_test(v)	(atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v)	(atomic_sub_return(1, v) == 0)
#define atomic_inc_return(v)    (atomic_add_return(1, v))
#define atomic_dec_return(v)    (atomic_sub_return(1, v))
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)

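/*
 * atomic_inc()/atomic_dec() imply no memory barrier of their own, so the
 * before/after helpers expand to a full smp_mb().
 */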
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

#include <asm-generic/atomic-long.h>
#endif
#endif