/* SPDX-License-Identifier: GPL-2.0 */
/* 64-bit atomic xchg() and cmpxchg() definitions.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_CMPXCHG__
#define __ARCH_SPARC64_CMPXCHG__

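/*
 * 32-bit compare-and-swap: CAS compares the word at [m] with 'old'
 * and, if they match, stores 'new' there.  Either way the prior
 * memory value is left in the register tied to 'new', which is what
 * we return.
 */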
static inline unsigned long
__cmpxchg_u32(volatile int *m, int old, int new)
{
	__asm__ __volatile__("cas [%2], %3, %0"
			     : "=&r" (new)
			     : "0" (new), "r" (m), "r" (old)
			     : "memory");

	return new;
}

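/*
 * 32-bit xchg built from CAS: retry until the CAS succeeds, i.e.
 * until the value the CAS observed matches the value just loaded;
 * the annulled branch delay slot re-arms 'val' from the copy saved
 * in tmp1 on every failed attempt.
 */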
static inline unsigned long xchg32(__volatile__ unsigned int *m, unsigned int val)
{
	unsigned long tmp1, tmp2;

	__asm__ __volatile__(
"	mov		%0, %1\n"
"1:	lduw		[%4], %2\n"
"	cas		[%4], %2, %0\n"
"	cmp		%2, %0\n"
"	bne,a,pn	%%icc, 1b\n"
"	 mov		%1, %0\n"
	: "=&r" (val), "=&r" (tmp1), "=&r" (tmp2)
	: "0" (val), "r" (m)
	: "cc", "memory");
	return val;
}

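/* 64-bit variant of xchg32: the same retry loop using ldx/casx and %xcc. */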
static inline unsigned long xchg64(__volatile__ unsigned long *m, unsigned long val)
{
	unsigned long tmp1, tmp2;

	__asm__ __volatile__(
"	mov		%0, %1\n"
"1:	ldx		[%4], %2\n"
"	casx		[%4], %2, %0\n"
"	cmp		%2, %0\n"
"	bne,a,pn	%%xcc, 1b\n"
"	 mov		%1, %0\n"
	: "=&r" (val), "=&r" (tmp1), "=&r" (tmp2)
	: "0" (val), "r" (m)
	: "cc", "memory");
	return val;
}

#define arch_xchg(ptr, x)						\
({	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		__arch_xchg((unsigned long)(x), (ptr), sizeof(*(ptr)));	\
	__ret;								\
})
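
/*
 * Illustrative use ('counter' is a hypothetical unsigned long):
 *
 *	old = arch_xchg(&counter, 0UL);
 *
 * atomically stores 0 and returns the value that was replaced.
 */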

void __xchg_called_with_bad_pointer(void);

/*
 * Use the 4-byte CAS instruction to implement a 2-byte xchg.  The
 * main logic here is computing the bit shift of the halfword we are
 * interested in; the XOR flips the halfword's position within the
 * word to account for big-endian byte order.
 */
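/*
 * Example: for an address with bit 1 set (m & 2 == 2), bit_shift is
 * (2 ^ 2) << 3 == 0, so the mask covers the low 16 bits; with bit 1
 * clear it is (0 ^ 2) << 3 == 16, selecting the high 16 bits, which
 * is where a big-endian word keeps its first halfword.
 */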
static inline unsigned long
xchg16(__volatile__ unsigned short *m, unsigned short val)
{
	unsigned long maddr = (unsigned long)m;
	int bit_shift = (((unsigned long)m & 2) ^ 2) << 3;
	unsigned int mask = 0xffff << bit_shift;
	unsigned int *ptr = (unsigned int *) (maddr & ~2);
	unsigned int old32, new32, load32;

	/* Read the old value */
	load32 = *ptr;

	do {
		old32 = load32;
		new32 = (load32 & ~mask) | val << bit_shift;
		load32 = __cmpxchg_u32(ptr, old32, new32);
	} while (load32 != old32);

	return (load32 & mask) >> bit_shift;
}

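/* Size dispatch for arch_xchg(); an unsupported size becomes a link error. */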
static __always_inline unsigned long
__arch_xchg(unsigned long x, __volatile__ void *ptr, int size)
{
	switch (size) {
	case 2:
		return xchg16(ptr, x);
	case 4:
		return xchg32(ptr, x);
	case 8:
		return xchg64(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

#include <asm-generic/cmpxchg-local.h>

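/* 64-bit compare-and-swap via CASX; returns the prior memory value. */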
static inline unsigned long
__cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
{
	__asm__ __volatile__("casx [%2], %3, %0"
			     : "=&r" (new)
			     : "0" (new), "r" (m), "r" (old)
			     : "memory");

	return new;
}

/*
 * Use the 4-byte CAS instruction to implement a 1-byte cmpxchg.  The
 * main logic here is computing the bit shift of the byte we are
 * interested in; the XOR flips the byte's position within the word
 * to account for big-endian byte order.
 */
static inline unsigned long
__cmpxchg_u8(volatile unsigned char *m, unsigned char old, unsigned char new)
{
	unsigned long maddr = (unsigned long)m;
	int bit_shift = (((unsigned long)m & 3) ^ 3) << 3;
	unsigned int mask = 0xff << bit_shift;
	unsigned int *ptr = (unsigned int *) (maddr & ~3);
	unsigned int old32, new32, load;
	unsigned int load32 = *ptr;

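	/*
	 * Build full-word images of the expected and new values and
	 * CAS the whole word.  On failure, retry only while the target
	 * byte still equals 'old', i.e. only some other byte in the
	 * word changed underneath us.
	 */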
	do {
		new32 = (load32 & ~mask) | (new << bit_shift);
		old32 = (load32 & ~mask) | (old << bit_shift);
		load32 = __cmpxchg_u32(ptr, old32, new32);
		if (load32 == old32)
			return old;
		load = (load32 & mask) >> bit_shift;
	} while (load == old);

	return load;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8(ptr, old, new);
	case 4:
		return __cmpxchg_u32(ptr, old, new);
	case 8:
		return __cmpxchg_u64(ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define arch_cmpxchg(ptr, o, n)						 \
  ({									 \
     __typeof__(*(ptr)) _o_ = (o);					 \
     __typeof__(*(ptr)) _n_ = (n);					 \
     (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,		 \
				    (unsigned long)_n_, sizeof(*(ptr))); \
  })
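
/*
 * Illustrative use ('lock' is a hypothetical unsigned long): try to
 * claim it by swapping 0 -> 1:
 *
 *	old = arch_cmpxchg(&lock, 0UL, 1UL);
 *
 * The claim succeeded iff 'old' comes back as 0UL.
 */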

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */

static inline unsigned long __cmpxchg_local(volatile void *ptr,
				      unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 4:
	case 8:	return __cmpxchg(ptr, old, new, size);
	default:
		return __generic_cmpxchg_local(ptr, old, new, size);
	}

	return old;
}

#define arch_cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))
#define arch_cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
  })
#define arch_cmpxchg64(ptr, o, n)	arch_cmpxchg64_local((ptr), (o), (n))

#endif /* __ARCH_SPARC64_CMPXCHG__ */