xref: /openbmc/linux/arch/openrisc/include/asm/cmpxchg.h (revision 1ac731c529cd4d6adbce134754b51ff7d822b145)
111595172SStefan Kristiansson /*
2489e0f80SStafford Horne  * 1,2 and 4 byte cmpxchg and xchg implementations for OpenRISC.
3489e0f80SStafford Horne  *
411595172SStefan Kristiansson  * Copyright (C) 2014 Stefan Kristiansson <stefan.kristiansson@saunalahti.fi>
5489e0f80SStafford Horne  * Copyright (C) 2017 Stafford Horne <shorne@gmail.com>
611595172SStefan Kristiansson  *
711595172SStefan Kristiansson  * This file is licensed under the terms of the GNU General Public License
811595172SStefan Kristiansson  * version 2.  This program is licensed "as is" without any warranty of any
911595172SStefan Kristiansson  * kind, whether express or implied.
10489e0f80SStafford Horne  *
11489e0f80SStafford Horne  * Note:
12489e0f80SStafford Horne  * The portable implementations of 1 and 2 byte xchg and cmpxchg using a 4
13489e0f80SStafford Horne  * byte cmpxchg is sourced heavily from the sh and mips implementations.
1411595172SStefan Kristiansson  */
1511595172SStefan Kristiansson 
1611595172SStefan Kristiansson #ifndef __ASM_OPENRISC_CMPXCHG_H
1711595172SStefan Kristiansson #define __ASM_OPENRISC_CMPXCHG_H
1811595172SStefan Kristiansson 
19b22d73c2SWill Deacon #include  <linux/bits.h>
20b22d73c2SWill Deacon #include  <linux/compiler.h>
2111595172SStefan Kristiansson #include  <linux/types.h>
2211595172SStefan Kristiansson 
2311595172SStefan Kristiansson #define __HAVE_ARCH_CMPXCHG 1
2411595172SStefan Kristiansson 
/*
 * Atomically compare-and-exchange the 32-bit word at ptr.
 *
 * Returns the value observed in *ptr: equal to @old on success, the
 * conflicting value on failure.
 *
 * Built on the OpenRISC l.lwa/l.swa (load/store atomic) pair: l.swa
 * sets the flag only if no other store to the linked address occurred
 * since the matching l.lwa, so a failed store simply retries.
 */
static inline unsigned long cmpxchg_u32(volatile void *ptr,
		unsigned long old, unsigned long new)
{
	__asm__ __volatile__(
		"1:	l.lwa %0, 0(%1)		\n"	/* linked load of *ptr */
		"	l.sfeq %0, %2		\n"	/* flag = (*ptr == old) */
		"	l.bnf 2f		\n"	/* mismatch: return loaded value */
		"	 l.nop			\n"	/* branch delay slot */
		"	l.swa 0(%1), %3		\n"	/* conditional store of new */
		"	l.bnf 1b		\n"	/* store failed: retry */
		"	 l.nop			\n"	/* branch delay slot */
		"2:				\n"
		: "=&r"(old)
		: "r"(ptr), "r"(old), "r"(new)
		: "cc", "memory");

	return old;
}
4311595172SStefan Kristiansson 
/*
 * Atomically exchange the 32-bit word at ptr with @val.
 *
 * Returns the previous contents of *ptr.  The l.lwa/l.swa pair loops
 * until the conditional store succeeds without an intervening write to
 * the linked address.
 */
static inline unsigned long xchg_u32(volatile void *ptr,
		unsigned long val)
{
	__asm__ __volatile__(
		"1:	l.lwa %0, 0(%1)		\n"	/* linked load: previous value */
		"	l.swa 0(%1), %2		\n"	/* conditional store of val */
		"	l.bnf 1b		\n"	/* lost reservation: retry */
		"	 l.nop			\n"	/* branch delay slot */
		: "=&r"(val)
		: "r"(ptr), "r"(val)
		: "cc", "memory");

	return val;
}
5811595172SStefan Kristiansson 
/*
 * Emulate a 1- or 2-byte cmpxchg on top of the native 32-bit one.
 *
 * The containing aligned 32-bit word is loaded, the narrow field is
 * compared against @old, and on a match the whole word is cmpxchg'd
 * with the field replaced by @new.  A 32-bit cmpxchg failure caused by
 * a change to the *other* bytes of the word is not a user-visible
 * failure, so the loop re-extracts the field and tries again.
 *
 * Returns the previous narrow value (== @old on success).
 */
static inline u32 cmpxchg_small(volatile void *ptr, u32 old, u32 new,
				int size)
{
	int off = (unsigned long)ptr % sizeof(u32);
	volatile u32 *p = ptr - off;
#ifdef __BIG_ENDIAN
	int bitoff = (sizeof(u32) - size - off) * BITS_PER_BYTE;
#else
	int bitoff = off * BITS_PER_BYTE;
#endif
	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
	u32 cur32, expect32, repl32;
	u32 cur;

	cur32 = READ_ONCE(*p);

	for (;;) {
		/* Extract the narrow field currently stored. */
		cur = (cur32 & bitmask) >> bitoff;
		if (cur != old)
			return cur;

		/* Splice old/new into the untouched neighbouring bytes. */
		expect32 = (cur32 & ~bitmask) | (old << bitoff);
		repl32 = (cur32 & ~bitmask) | (new << bitoff);

		/* Full-word cmpxchg; returns the word actually found. */
		cur32 = cmpxchg_u32(p, expect32, repl32);
		if (cur32 == expect32)
			return old;
	}
}
89489e0f80SStafford Horne 
90489e0f80SStafford Horne /* xchg */
91489e0f80SStafford Horne 
/*
 * Emulate a 1- or 2-byte xchg on top of the native 32-bit cmpxchg.
 *
 * Loads the containing aligned 32-bit word, substitutes @x into the
 * narrow field, and retries the full-word cmpxchg until no concurrent
 * writer changed the word in between.
 *
 * Returns the previous narrow value.
 */
static inline u32 xchg_small(volatile void *ptr, u32 x, int size)
{
	int off = (unsigned long)ptr % sizeof(u32);
	volatile u32 *p = ptr - off;
#ifdef __BIG_ENDIAN
	int bitoff = (sizeof(u32) - size - off) * BITS_PER_BYTE;
#else
	int bitoff = off * BITS_PER_BYTE;
#endif
	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
	u32 cur32, next32;
	u32 prev;

	do {
		cur32 = READ_ONCE(*p);
		prev = (cur32 & bitmask) >> bitoff;
		next32 = (cur32 & ~bitmask) | (x << bitoff);
	} while (cmpxchg_u32(p, cur32, next32) != cur32);

	return prev;
}
113489e0f80SStafford Horne 
/*
 * This function is declared but never defined.  A call that survives
 * constant folding trips the __compiletime_error() attribute (or, at
 * worst, fails at link time), catching invalidly-sized cmpxchg() uses.
 */
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
120489e0f80SStafford Horne 
/*
 * Size-dispatching cmpxchg helper: 1- and 2-byte accesses go through
 * the cmpxchg_small() emulation, 4-byte accesses straight to the
 * native cmpxchg_u32().  Any other size resolves to the never-defined
 * __cmpxchg_called_with_bad_pointer() and breaks the build.
 */
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
		unsigned long new, int size)
{
	if (size == 1 || size == 2)
		return cmpxchg_small(ptr, old, new, size);
	if (size == 4)
		return cmpxchg_u32(ptr, old, new);

	return __cmpxchg_called_with_bad_pointer();
}
134489e0f80SStafford Horne 
/*
 * arch_cmpxchg - type-generic cmpxchg wrapper: dispatches on
 * sizeof(*(ptr)) via __cmpxchg() and casts the result back to the
 * pointed-to type.
 */
#define arch_cmpxchg(ptr, o, n)						\
	({								\
		(__typeof__(*(ptr))) __cmpxchg((ptr),			\
					       (unsigned long)(o),	\
					       (unsigned long)(n),	\
					       sizeof(*(ptr)));		\
	})
142489e0f80SStafford Horne 
/*
 * This function is declared but never defined.  A call that survives
 * constant folding trips the __compiletime_error() attribute (or, at
 * worst, fails at link time), catching invalidly-sized xchg() uses.
 */
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");
149489e0f80SStafford Horne 
/*
 * Size-dispatching xchg helper: 1- and 2-byte accesses go through the
 * xchg_small() emulation, 4-byte accesses straight to the native
 * xchg_u32().  Any other size resolves to the never-defined
 * __xchg_called_with_bad_pointer() and breaks the build.
 */
static inline unsigned long
__arch_xchg(volatile void *ptr, unsigned long with, int size)
{
	if (size == 1 || size == 2)
		return xchg_small(ptr, with, size);
	if (size == 4)
		return xchg_u32(ptr, with);

	return __xchg_called_with_bad_pointer();
}
163489e0f80SStafford Horne 
/*
 * arch_xchg - type-generic xchg wrapper: dispatches on sizeof(*(ptr))
 * via __arch_xchg() and casts the result back to the pointed-to type.
 */
#define arch_xchg(ptr, with) 						\
	({								\
		(__typeof__(*(ptr))) __arch_xchg((ptr),			\
						 (unsigned long)(with),	\
						 sizeof(*(ptr)));	\
	})
17011595172SStefan Kristiansson 
17111595172SStefan Kristiansson #endif /* __ASM_OPENRISC_CMPXCHG_H */
172