/* arch/arm/include/asm/cmpxchg.h */
#ifndef __ASM_ARM_CMPXCHG_H
#define __ASM_ARM_CMPXCHG_H

#include <linux/irqflags.h>
#include <asm/barrier.h>

#if defined(CONFIG_CPU_SA1100) || defined(CONFIG_CPU_SA110)
/*
 * On the StrongARM, "swp" is terminally broken since it bypasses the
 * cache totally.  This means that the cache becomes inconsistent, and,
 * since we use normal loads/stores as well, this is really bad.
 * Typically, this causes oopsen in filp_close, but could have other,
 * more disastrous effects.  There are two work-arounds:
 *  1. Disable interrupts and emulate the atomic swap
 *  2. Clean the cache, perform atomic swap, flush the cache
 *
 * We choose (1) since it's the "easiest" to achieve here and is not
 * dependent on the processor type.
 *
 * NOTE that this solution won't work on an SMP system, so explicitly
 * forbid it here.
 */
#define swp_is_buggy
#endif

static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	extern void __bad_xchg(volatile void *, int);
	unsigned long ret;
#ifdef swp_is_buggy
	unsigned long flags;
#endif
#if __LINUX_ARM_ARCH__ >= 6
	unsigned int tmp;
#endif

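	/*
	 * xchg() implies a full memory barrier, so fence both before
	 * and after the swap itself.
	 */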
	smp_mb();

	switch (size) {
#if __LINUX_ARM_ARCH__ >= 6
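	/*
	 * ARMv6+ uses load/store-exclusive: ldrex(b) loads and marks
	 * the address in the exclusive monitor, strex(b) stores only
	 * if the monitor is still set, writing 0 (success) or 1
	 * (failure) to %1, so the loop retries until the swap has
	 * happened atomically.
	 */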
	case 1:
		asm volatile("@	__xchg1\n"
		"1:	ldrexb	%0, [%3]\n"
		"	strexb	%1, %2, [%3]\n"
		"	teq	%1, #0\n"
		"	bne	1b"
			: "=&r" (ret), "=&r" (tmp)
			: "r" (x), "r" (ptr)
			: "memory", "cc");
		break;
	case 4:
		asm volatile("@	__xchg4\n"
		"1:	ldrex	%0, [%3]\n"
		"	strex	%1, %2, [%3]\n"
		"	teq	%1, #0\n"
		"	bne	1b"
			: "=&r" (ret), "=&r" (tmp)
			: "r" (x), "r" (ptr)
			: "memory", "cc");
		break;
#elif defined(swp_is_buggy)
#ifdef CONFIG_SMP
#error SMP is not supported on this platform
#endif
	case 1:
		raw_local_irq_save(flags);
		ret = *(volatile unsigned char *)ptr;
		*(volatile unsigned char *)ptr = x;
		raw_local_irq_restore(flags);
		break;

	case 4:
		raw_local_irq_save(flags);
		ret = *(volatile unsigned long *)ptr;
		*(volatile unsigned long *)ptr = x;
		raw_local_irq_restore(flags);
		break;
#else
	case 1:
		asm volatile("@	__xchg1\n"
		"	swpb	%0, %1, [%2]"
			: "=&r" (ret)
			: "r" (x), "r" (ptr)
			: "memory", "cc");
		break;
	case 4:
		asm volatile("@	__xchg4\n"
		"	swp	%0, %1, [%2]"
			: "=&r" (ret)
			: "r" (x), "r" (ptr)
			: "memory", "cc");
		break;
#endif
	default:
		__bad_xchg(ptr, size);
		ret = 0;
		break;
	}
	smp_mb();

	return ret;
}

#define xchg(ptr,x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
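
/*
 * Illustrative sketch only (the identifiers below are hypothetical):
 * atomically set a one-word flag and observe its previous value.
 *
 *	unsigned long prev = xchg(&shared_flag, 1UL);
 *	if (prev == 0)
 *		we_set_it_first();
 */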

#include <asm-generic/cmpxchg-local.h>

#if __LINUX_ARM_ARCH__ < 6
/* min ARCH < ARMv6 */

#ifdef CONFIG_SMP
#error "SMP is not supported on this platform"
#endif

/*
 * cmpxchg_local and cmpxchg64_local are atomic with respect to the
 * current CPU.  Always make them available.
 */
#define cmpxchg_local(ptr, o, n)				  	       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

#ifndef CONFIG_SMP
#include <asm-generic/cmpxchg.h>
#endif

#else	/* min ARCH >= ARMv6 */

extern void __bad_cmpxchg(volatile void *ptr, int size);

/*
 * cmpxchg only supports 32-bit operands on ARMv6.
 */

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long oldval, res;

	switch (size) {
#ifndef CONFIG_CPU_V6	/* min ARCH >= ARMv6K */
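	/*
	 * Byte and halfword exclusives (ldrexb/strexb, ldrexh/strexh)
	 * were introduced with ARMv6K; plain ARMv6 only provides the
	 * word-sized ldrex/strex, so these cases are compiled out there.
	 */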
	case 1:
		do {
			asm volatile("@ __cmpxchg1\n"
			"	ldrexb	%1, [%2]\n"
			"	mov	%0, #0\n"
			"	teq	%1, %3\n"
			"	strexbeq %0, %4, [%2]\n"
				: "=&r" (res), "=&r" (oldval)
				: "r" (ptr), "Ir" (old), "r" (new)
				: "memory", "cc");
		} while (res);
		break;
	case 2:
		do {
			asm volatile("@ __cmpxchg2\n"
			"	ldrexh	%1, [%2]\n"
			"	mov	%0, #0\n"
			"	teq	%1, %3\n"
			"	strexheq %0, %4, [%2]\n"
				: "=&r" (res), "=&r" (oldval)
				: "r" (ptr), "Ir" (old), "r" (new)
				: "memory", "cc");
		} while (res);
		break;
#endif
	case 4:
		do {
			asm volatile("@ __cmpxchg4\n"
			"	ldrex	%1, [%2]\n"
			"	mov	%0, #0\n"
			"	teq	%1, %3\n"
			"	strexeq %0, %4, [%2]\n"
				: "=&r" (res), "=&r" (oldval)
				: "r" (ptr), "Ir" (old), "r" (new)
				: "memory", "cc");
		} while (res);
		break;
	default:
		__bad_cmpxchg(ptr, size);
		oldval = 0;
	}

	return oldval;
}

static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
					 unsigned long new, int size)
{
	unsigned long ret;

	smp_mb();
	ret = __cmpxchg(ptr, old, new, size);
	smp_mb();

	return ret;
}

#define cmpxchg(ptr,o,n)						\
	((__typeof__(*(ptr)))__cmpxchg_mb((ptr),			\
					  (unsigned long)(o),		\
					  (unsigned long)(n),		\
					  sizeof(*(ptr))))
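
/*
 * Illustrative sketch (hypothetical identifiers): the usual lock-free
 * read-modify-write loop built on cmpxchg().
 *
 *	unsigned long old, new;
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */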

static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	unsigned long ret;

	switch (size) {
#ifdef CONFIG_CPU_V6	/* min ARCH == ARMv6 */
	case 1:
	case 2:
		ret = __cmpxchg_local_generic(ptr, old, new, size);
		break;
#endif
	default:
		ret = __cmpxchg(ptr, old, new, size);
	}

	return ret;
}

static inline unsigned long long __cmpxchg64(unsigned long long *ptr,
					     unsigned long long old,
					     unsigned long long new)
{
	unsigned long long oldval;
	unsigned long res;

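	/*
	 * ldrexd/strexd operate on a 64-bit quantity held in an
	 * even/odd register pair; the %H operand modifier names the
	 * highest-numbered register of that pair.
	 */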
	__asm__ __volatile__(
"1:	ldrexd		%1, %H1, [%3]\n"
"	teq		%1, %4\n"
"	teqeq		%H1, %H4\n"
"	bne		2f\n"
"	strexd		%0, %5, %H5, [%3]\n"
"	teq		%0, #0\n"
"	bne		1b\n"
"2:"
	: "=&r" (res), "=&r" (oldval), "+Qo" (*ptr)
	: "r" (ptr), "r" (old), "r" (new)
	: "cc");

	return oldval;
}

static inline unsigned long long __cmpxchg64_mb(unsigned long long *ptr,
						unsigned long long old,
						unsigned long long new)
{
	unsigned long long ret;

	smp_mb();
	ret = __cmpxchg64(ptr, old, new);
	smp_mb();

	return ret;
}

#define cmpxchg_local(ptr,o,n)						\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr),			\
				       (unsigned long)(o),		\
				       (unsigned long)(n),		\
				       sizeof(*(ptr))))

#define cmpxchg64(ptr, o, n)						\
	((__typeof__(*(ptr)))__cmpxchg64_mb((ptr),			\
					(unsigned long long)(o),	\
					(unsigned long long)(n)))
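
/*
 * Illustrative sketch (hypothetical identifiers; u64 comes from
 * <linux/types.h>): a 64-bit compare-and-swap update loop, usable on
 * 32-bit ARMv6K+ via ldrexd/strexd.
 *
 *	u64 old, new;
 *	do {
 *		old = stats;
 *		new = old + delta;
 *	} while (cmpxchg64(&stats, old, new) != old);
 */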

#define cmpxchg64_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg64((ptr),				\
					(unsigned long long)(o),	\
					(unsigned long long)(n)))

#endif	/* __LINUX_ARM_ARCH__ >= 6 */

#endif /* __ASM_ARM_CMPXCHG_H */