xref: /openbmc/linux/arch/openrisc/include/asm/cmpxchg.h (revision 489e0f802db708c69004f64d92a3e1b70731614a)
111595172SStefan Kristiansson /*
2*489e0f80SStafford Horne  * 1,2 and 4 byte cmpxchg and xchg implementations for OpenRISC.
3*489e0f80SStafford Horne  *
411595172SStefan Kristiansson  * Copyright (C) 2014 Stefan Kristiansson <stefan.kristiansson@saunalahti.fi>
5*489e0f80SStafford Horne  * Copyright (C) 2017 Stafford Horne <shorne@gmail.com>
611595172SStefan Kristiansson  *
711595172SStefan Kristiansson  * This file is licensed under the terms of the GNU General Public License
811595172SStefan Kristiansson  * version 2.  This program is licensed "as is" without any warranty of any
911595172SStefan Kristiansson  * kind, whether express or implied.
10*489e0f80SStafford Horne  *
11*489e0f80SStafford Horne  * Note:
12*489e0f80SStafford Horne  * The portable implementations of 1 and 2 byte xchg and cmpxchg using a 4
13*489e0f80SStafford Horne  * byte cmpxchg is sourced heavily from the sh and mips implementations.
1411595172SStefan Kristiansson  */
1511595172SStefan Kristiansson 
1611595172SStefan Kristiansson #ifndef __ASM_OPENRISC_CMPXCHG_H
1711595172SStefan Kristiansson #define __ASM_OPENRISC_CMPXCHG_H
1811595172SStefan Kristiansson 
1911595172SStefan Kristiansson #include  <linux/types.h>
20*489e0f80SStafford Horne #include  <linux/bitops.h>
2111595172SStefan Kristiansson 
2211595172SStefan Kristiansson #define __HAVE_ARCH_CMPXCHG 1
2311595172SStefan Kristiansson 
/*
 * Atomically compare-and-exchange the aligned 32-bit word at ptr:
 * if *ptr equals old, store new into it. Either way, return the value
 * actually read from *ptr (callers detect success via return == old).
 *
 * Built on the OpenRISC linked load / conditional store pair: l.lwa
 * places a reservation on the address and l.swa only succeeds (sets the
 * flag) if the reservation is still intact, so a racing writer forces
 * a retry of the whole sequence.
 */
static inline unsigned long cmpxchg_u32(volatile void *ptr,
		unsigned long old, unsigned long new)
{
	__asm__ __volatile__(
		"1:	l.lwa %0, 0(%1)		\n"	/* linked load of *ptr into %0 */
		"	l.sfeq %0, %2		\n"	/* flag = (%0 == old) */
		"	l.bnf 2f		\n"	/* mismatch: skip the store */
		"	 l.nop			\n"	/* branch delay slot */
		"	l.swa 0(%1), %3		\n"	/* conditional store of new */
		"	l.bnf 1b		\n"	/* store lost the race: retry */
		"	 l.nop			\n"	/* branch delay slot */
		"2:				\n"
		: "=&r"(old)	/* early-clobber: written before inputs are dead */
		: "r"(ptr), "r"(old), "r"(new)
		: "cc", "memory");	/* flag register + acts as a barrier */

	return old;
}
4211595172SStefan Kristiansson 
/*
 * Atomically swap the aligned 32-bit word at ptr with val; return the
 * previous contents of *ptr.
 *
 * The l.lwa/l.swa pair retries until the conditional store succeeds,
 * i.e. until no other writer touched the word between load and store.
 */
static inline unsigned long xchg_u32(volatile void *ptr,
		unsigned long val)
{
	__asm__ __volatile__(
		"1:	l.lwa %0, 0(%1)		\n"	/* linked load of old value */
		"	l.swa 0(%1), %2		\n"	/* conditional store of val */
		"	l.bnf 1b		\n"	/* store failed: retry */
		"	 l.nop			\n"	/* branch delay slot */
		: "=&r"(val)	/* early-clobber: holds old value on exit */
		: "r"(ptr), "r"(val)
		: "cc", "memory");	/* flag register + acts as a barrier */

	return val;
}
5711595172SStefan Kristiansson 
58*489e0f80SStafford Horne static inline u32 cmpxchg_small(volatile void *ptr, u32 old, u32 new,
59*489e0f80SStafford Horne 				int size)
60*489e0f80SStafford Horne {
61*489e0f80SStafford Horne 	int off = (unsigned long)ptr % sizeof(u32);
62*489e0f80SStafford Horne 	volatile u32 *p = ptr - off;
63*489e0f80SStafford Horne #ifdef __BIG_ENDIAN
64*489e0f80SStafford Horne 	int bitoff = (sizeof(u32) - size - off) * BITS_PER_BYTE;
65*489e0f80SStafford Horne #else
66*489e0f80SStafford Horne 	int bitoff = off * BITS_PER_BYTE;
67*489e0f80SStafford Horne #endif
68*489e0f80SStafford Horne 	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
69*489e0f80SStafford Horne 	u32 load32, old32, new32;
70*489e0f80SStafford Horne 	u32 ret;
71*489e0f80SStafford Horne 
72*489e0f80SStafford Horne 	load32 = READ_ONCE(*p);
73*489e0f80SStafford Horne 
74*489e0f80SStafford Horne 	while (true) {
75*489e0f80SStafford Horne 		ret = (load32 & bitmask) >> bitoff;
76*489e0f80SStafford Horne 		if (old != ret)
77*489e0f80SStafford Horne 			return ret;
78*489e0f80SStafford Horne 
79*489e0f80SStafford Horne 		old32 = (load32 & ~bitmask) | (old << bitoff);
80*489e0f80SStafford Horne 		new32 = (load32 & ~bitmask) | (new << bitoff);
81*489e0f80SStafford Horne 
82*489e0f80SStafford Horne 		/* Do 32 bit cmpxchg */
83*489e0f80SStafford Horne 		load32 = cmpxchg_u32(p, old32, new32);
84*489e0f80SStafford Horne 		if (load32 == old32)
85*489e0f80SStafford Horne 			return old;
86*489e0f80SStafford Horne 	}
87*489e0f80SStafford Horne }
88*489e0f80SStafford Horne 
89*489e0f80SStafford Horne /* xchg */
90*489e0f80SStafford Horne 
91*489e0f80SStafford Horne static inline u32 xchg_small(volatile void *ptr, u32 x, int size)
92*489e0f80SStafford Horne {
93*489e0f80SStafford Horne 	int off = (unsigned long)ptr % sizeof(u32);
94*489e0f80SStafford Horne 	volatile u32 *p = ptr - off;
95*489e0f80SStafford Horne #ifdef __BIG_ENDIAN
96*489e0f80SStafford Horne 	int bitoff = (sizeof(u32) - size - off) * BITS_PER_BYTE;
97*489e0f80SStafford Horne #else
98*489e0f80SStafford Horne 	int bitoff = off * BITS_PER_BYTE;
99*489e0f80SStafford Horne #endif
100*489e0f80SStafford Horne 	u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
101*489e0f80SStafford Horne 	u32 oldv, newv;
102*489e0f80SStafford Horne 	u32 ret;
103*489e0f80SStafford Horne 
104*489e0f80SStafford Horne 	do {
105*489e0f80SStafford Horne 		oldv = READ_ONCE(*p);
106*489e0f80SStafford Horne 		ret = (oldv & bitmask) >> bitoff;
107*489e0f80SStafford Horne 		newv = (oldv & ~bitmask) | (x << bitoff);
108*489e0f80SStafford Horne 	} while (cmpxchg_u32(p, oldv, newv) != oldv);
109*489e0f80SStafford Horne 
110*489e0f80SStafford Horne 	return ret;
111*489e0f80SStafford Horne }
112*489e0f80SStafford Horne 
113*489e0f80SStafford Horne /*
114*489e0f80SStafford Horne  * This function doesn't exist, so you'll get a linker error
115*489e0f80SStafford Horne  * if something tries to do an invalid cmpxchg().
116*489e0f80SStafford Horne  */
117*489e0f80SStafford Horne extern unsigned long __cmpxchg_called_with_bad_pointer(void)
118*489e0f80SStafford Horne 	__compiletime_error("Bad argument size for cmpxchg");
119*489e0f80SStafford Horne 
120*489e0f80SStafford Horne static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
121*489e0f80SStafford Horne 		unsigned long new, int size)
122*489e0f80SStafford Horne {
123*489e0f80SStafford Horne 	switch (size) {
124*489e0f80SStafford Horne 	case 1:
125*489e0f80SStafford Horne 	case 2:
126*489e0f80SStafford Horne 		return cmpxchg_small(ptr, old, new, size);
127*489e0f80SStafford Horne 	case 4:
128*489e0f80SStafford Horne 		return cmpxchg_u32(ptr, old, new);
129*489e0f80SStafford Horne 	default:
130*489e0f80SStafford Horne 		return __cmpxchg_called_with_bad_pointer();
131*489e0f80SStafford Horne 	}
132*489e0f80SStafford Horne }
133*489e0f80SStafford Horne 
/*
 * cmpxchg - atomic compare-and-exchange, sized by the pointee type
 * (1, 2 or 4 bytes).  Evaluates to the value previously stored at ptr,
 * cast back to the pointee type; success means the result equals o.
 */
#define cmpxchg(ptr, o, n)						\
	({								\
		(__typeof__(*(ptr))) __cmpxchg((ptr),			\
					       (unsigned long)(o),	\
					       (unsigned long)(n),	\
					       sizeof(*(ptr)));		\
	})
141*489e0f80SStafford Horne 
142*489e0f80SStafford Horne /*
143*489e0f80SStafford Horne  * This function doesn't exist, so you'll get a linker error if
144*489e0f80SStafford Horne  * something tries to do an invalidly-sized xchg().
145*489e0f80SStafford Horne  */
146*489e0f80SStafford Horne extern unsigned long __xchg_called_with_bad_pointer(void)
147*489e0f80SStafford Horne 	__compiletime_error("Bad argument size for xchg");
148*489e0f80SStafford Horne 
149*489e0f80SStafford Horne static inline unsigned long __xchg(volatile void *ptr, unsigned long with,
150*489e0f80SStafford Horne 		int size)
151*489e0f80SStafford Horne {
152*489e0f80SStafford Horne 	switch (size) {
153*489e0f80SStafford Horne 	case 1:
154*489e0f80SStafford Horne 	case 2:
155*489e0f80SStafford Horne 		return xchg_small(ptr, with, size);
156*489e0f80SStafford Horne 	case 4:
157*489e0f80SStafford Horne 		return xchg_u32(ptr, with);
158*489e0f80SStafford Horne 	default:
159*489e0f80SStafford Horne 		return __xchg_called_with_bad_pointer();
160*489e0f80SStafford Horne 	}
161*489e0f80SStafford Horne }
162*489e0f80SStafford Horne 
/*
 * xchg - atomic exchange, sized by the pointee type (1, 2 or 4 bytes).
 * Evaluates to the value previously stored at ptr, cast back to the
 * pointee type.
 */
#define xchg(ptr, with) 						\
	({								\
		(__typeof__(*(ptr))) __xchg((ptr),			\
					    (unsigned long)(with),	\
					    sizeof(*(ptr)));		\
	})
16911595172SStefan Kristiansson 
17011595172SStefan Kristiansson #endif /* __ASM_OPENRISC_CMPXCHG_H */
171