/* arch/sh/include/asm/cmpxchg-llsc.h */
#ifndef __ASM_SH_CMPXCHG_LLSC_H
#define __ASM_SH_CMPXCHG_LLSC_H

#include <linux/bitops.h>
#include <asm/byteorder.h>

/*
 * Atomically exchange *m with val using an LL/SC retry loop:
 * movli.l takes a reservation on @m, movco.l stores only if the
 * reservation is still held, and bf 1b retries on failure.  The
 * trailing synco orders the store against later memory accesses.
 */
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l	@%2, %0	! xchg_u32	\n\t"
		"mov		%0, %1			\n\t"
		"mov		%3, %0			\n\t"
		"movco.l	%0, @%2			\n\t"
		"bf		1b			\n\t"
		"synco					\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (val)
		: "t", "memory"
	);

	return retval;
}

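/*
 * Illustrative sketch, not part of this header: a caller might use
 * xchg_u32() to claim a word-sized flag.  The flag name below is
 * hypothetical.
 *
 *	static volatile u32 owner_flag;
 *
 *	if (xchg_u32(&owner_flag, 1) == 0)
 *		...	we read 0 and atomically wrote 1, so we own it
 */
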
/*
 * Atomically compare *m against old and, only if they match, store
 * new; returns the value originally read from *m.  On a mismatch the
 * just-read value is written back, which completes the movco.l and
 * drops the reservation without changing memory.
 */
static inline unsigned long
__cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! __cmpxchg_u32		\n\t"
		"mov		%0, %1				\n\t"
		"cmp/eq		%1, %3				\n\t"
		"bf		2f				\n\t"
		"mov		%4, %0				\n\t"
		"2:						\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (old), "r" (new)
		: "t", "memory"
	);

	return retval;
}

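/*
 * Illustrative sketch, not part of this header: the usual way to build
 * an arbitrary atomic read-modify-write on top of __cmpxchg_u32() is a
 * retry loop, exactly as __xchg_cmpxchg() below does.  The counter
 * name is hypothetical.
 *
 *	static volatile u32 counter;
 *	u32 old, new;
 *
 *	do {
 *		old = READ_ONCE(counter);
 *		new = old + 1;
 *	} while (__cmpxchg_u32(&counter, old, new) != old);
 */
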
/*
 * Exchange a 1- or 2-byte quantity, for which there is no native LL/SC
 * operation, by operating on the aligned u32 containing it: only the
 * bytes selected by bitmask are replaced via __cmpxchg_u32(), and the
 * loop retries if another CPU modified the word in the meantime.
 * Callers must pass a value that fits in size bytes.
 */
static inline u32 __xchg_cmpxchg(volatile void *ptr, u32 x, int size)
{
	int off = (unsigned long)ptr % sizeof(u32);
	volatile u32 *p = ptr - off;
#ifdef __BIG_ENDIAN
	/* account for operand size so 2-byte values land on the right bytes */
	int bitoff = (sizeof(u32) - size - off) * BITS_PER_BYTE;
#else
	int bitoff = off * BITS_PER_BYTE;
#endif
	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
	u32 oldv, newv;
	u32 ret;

	do {
		oldv = READ_ONCE(*p);
		ret = (oldv & bitmask) >> bitoff;
		newv = (oldv & ~bitmask) | (x << bitoff);
	} while (__cmpxchg_u32(p, oldv, newv) != oldv);

	return ret;
}

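/*
 * Worked example of the mask arithmetic above, assuming a u16 at byte
 * offset 2 of its aligned word: on little endian, bitoff = 2 * 8 = 16
 * and bitmask = 0xffff0000, so the exchange touches bits 31..16 and
 * preserves the low half of the word; on big endian those same two
 * bytes are the least significant, so bitoff = (4 - 2 - 2) * 8 = 0.
 */
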
static inline unsigned long xchg_u16(volatile u16 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof(*m));
}

static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	return __xchg_cmpxchg(m, val, sizeof(*m));
}

#endif /* __ASM_SH_CMPXCHG_LLSC_H */