/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 */

#ifndef __ASM_ARC_CMPXCHG_H
#define __ASM_ARC_CMPXCHG_H

#include <linux/build_bug.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/smp.h>

#ifdef CONFIG_ARC_HAS_LLSC

/*
 * if (*ptr == @old)
 *      *ptr = @new
 */
#define __cmpxchg(ptr, old, new)					\
({									\
	__typeof__(*(ptr)) _prev;					\
									\
	__asm__ __volatile__(						\
	"1:	llock  %0, [%1]	\n"					\
	"	brne   %0, %2, 2f	\n"				\
	"	scond  %3, [%1]	\n"					\
	"	bnz     1b		\n"				\
	"2:				\n"				\
	: "=&r"(_prev)	/* Early clobber prevents reg reuse */		\
	: "r"(ptr),	/* Not "m": llock only supports reg */		\
	  "ir"(old),							\
	  "r"(new)	/* Not "ir": scond can't take LIMM */		\
	: "cc",								\
	  "memory");	/* gcc knows memory is clobbered */		\
									\
	_prev;								\
})

#define arch_cmpxchg_relaxed(ptr, old, new)				\
({									\
	__typeof__(ptr) _p_ = (ptr);					\
	__typeof__(*(ptr)) _o_ = (old);					\
	__typeof__(*(ptr)) _n_ = (new);					\
	__typeof__(*(ptr)) _prev_;					\
									\
	switch(sizeof(*(_p_))) {					\
	case 4:								\
		_prev_ = __cmpxchg(_p_, _o_, _n_);			\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	_prev_;								\
})
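
/*
 * Illustrative sketch only, not part of this header: callers typically
 * wrap cmpxchg() in a read-modify-write retry loop, re-reading the word
 * whenever another CPU changed it in between. All names below are made
 * up for the example.
 *
 *	static inline int hyp_inc_unless_zero(int *p)
 *	{
 *		int old = READ_ONCE(*p);
 *		int prev;
 *
 *		while (old != 0) {
 *			prev = cmpxchg(p, old, old + 1);
 *			if (prev == old)
 *				return 1;	// swap happened, we won
 *			old = prev;		// lost the race, retry
 *		}
 *		return 0;
 *	}
 */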

#else

#define arch_cmpxchg(ptr, old, new)				        \
({									\
	volatile __typeof__(ptr) _p_ = (ptr);				\
	__typeof__(*(ptr)) _o_ = (old);					\
	__typeof__(*(ptr)) _n_ = (new);					\
	__typeof__(*(ptr)) _prev_;					\
	unsigned long __flags;						\
									\
	BUILD_BUG_ON(sizeof(*(_p_)) != 4);				\
									\
	/*								\
	 * spin lock/unlock provide the needed smp_mb() before/after	\
	 */								\
	atomic_ops_lock(__flags);					\
	_prev_ = *_p_;							\
	if (_prev_ == _o_)						\
		*_p_ = _n_;						\
	atomic_ops_unlock(__flags);					\
	_prev_;								\
})
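
/*
 * Illustration only, not from the source: with this fallback a call such
 * as
 *
 *	prev = arch_cmpxchg(&v->counter, 0, 1);
 *
 * does a plain load, compare and store, but with atomic_ops_lock() held
 * around it, so it remains atomic with respect to every other xchg(),
 * cmpxchg() and atomic op that takes the same lock. As the comment above
 * notes, the lock acquire/release also supply the full barriers, which is
 * why only the fully ordered arch_cmpxchg() is provided in this path.
 */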

#endif

/*
 * xchg
 */
#ifdef CONFIG_ARC_HAS_LLSC

#define __arch_xchg(ptr, val)						\
({									\
	__asm__ __volatile__(						\
	"	ex  %0, [%1]	\n"	/* set new value */	        \
	: "+r"(val)							\
	: "r"(ptr)							\
	: "memory");							\
	(val);		/* get old value */				\
})

#define arch_xchg_relaxed(ptr, val)					\
({									\
	__typeof__(ptr) _p_ = (ptr);					\
	__typeof__(*(ptr)) _val_ = (val);				\
									\
	switch(sizeof(*(_p_))) {					\
	case 4:								\
		_val_ = __arch_xchg(_p_, _val_);			\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
	_val_;								\
})
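
/*
 * Illustrative sketch only: xchg() unconditionally swaps the new value in
 * and returns whatever was stored before, e.g. to atomically claim and
 * clear a single slot. The names are made up for the example.
 *
 *	struct task_struct *hyp_take_waiter(struct task_struct **slot)
 *	{
 *		// store NULL, get the previously parked waiter (if any)
 *		return xchg(slot, NULL);
 *	}
 */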

#else  /* !CONFIG_ARC_HAS_LLSC */

/*
 * The EX instruction is baseline and present in !LLSC builds too. But in
 * this regime it still needs to take the @atomic_ops_lock spinlock to
 * interoperate with cmpxchg(), which uses the spinlock in !LLSC
 * (llist.h uses xchg and cmpxchg on the same data).
 */

#define arch_xchg(ptr, val)					        \
({									\
	__typeof__(ptr) _p_ = (ptr);					\
	__typeof__(*(ptr)) _val_ = (val);				\
									\
	unsigned long __flags;						\
									\
	atomic_ops_lock(__flags);					\
									\
	__asm__ __volatile__(						\
	"	ex  %0, [%1]	\n"					\
	: "+r"(_val_)							\
	: "r"(_p_)							\
	: "memory");							\
									\
	atomic_ops_unlock(__flags);					\
	_val_;								\
})
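
/*
 * Sketch of the interop problem described above (timeline invented for
 * illustration): if xchg() used a bare EX while cmpxchg() ran under
 * atomic_ops_lock(), the EX could land between cmpxchg()'s load and
 * store and its update would be silently overwritten:
 *
 *	CPU A: cmpxchg(), lock held	CPU B: bare EX, no lock
 *	prev = *p;			// reads old value
 *					ex  new_b, [p]	// swaps in new_b
 *	if (prev == old)
 *		*p = new;		// clobbers new_b, B's store is lost
 *
 * Taking atomic_ops_lock() around the EX serialises both paths.
 */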

#endif

#endif