/* SPDX-License-Identifier: GPL-2.0 */
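/*
 * Exchange and compare-and-exchange primitives for C-SKY, built on the
 * ldex.w/stex.w load-exclusive/store-exclusive (LL/SC) instruction pair.
 * Only 32-bit (size == 4) operands are supported; any other size falls
 * through to __bad_xchg(), which is never defined and so fails at link
 * time.
 */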

#ifndef __ASM_CSKY_CMPXCHG_H
#define __ASM_CSKY_CMPXCHG_H

#ifdef CONFIG_SMP
#include <asm/barrier.h>

extern void __bad_xchg(void);

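/*
 * __xchg_relaxed() - atomically swap @new into *@ptr and return the old
 * value, with no ordering guarantees.  ldex.w loads the current value,
 * stex.w attempts the exclusive store (writing 1 to the tmp operand on
 * success, 0 on failure) and bez retries the sequence until it succeeds.
 */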
#define __xchg_relaxed(new, ptr, size)				\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(*(ptr)) __ret;				\
	unsigned long tmp;					\
	switch (size) {						\
	case 4:							\
		asm volatile (					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
			: "=&r" (__ret), "=&r" (tmp)		\
			: "r" (__new), "r"(__ptr)		\
			:);					\
		break;						\
	default:						\
		__bad_xchg();					\
	}							\
	__ret;							\
})

#define arch_xchg_relaxed(ptr, x) \
		(__xchg_relaxed((x), (ptr), sizeof(*(ptr))))

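/*
 * __cmpxchg_relaxed() - if *@ptr equals @old, atomically replace it with
 * @new; no ordering guarantees.  Returns the value read from *@ptr, so
 * callers detect success by comparing the result with @old.  A failed
 * compare (cmpne/bt) exits the loop without attempting the store.
 */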
#define __cmpxchg_relaxed(ptr, old, new, size)			\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(new) __tmp;					\
	__typeof__(old) __old = (old);				\
	__typeof__(*(ptr)) __ret;				\
	switch (size) {						\
	case 4:							\
		asm volatile (					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	cmpne		%0, %4   \n"		\
		"	bt		2f       \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
		"2:				 \n"		\
			: "=&r" (__ret), "=&r" (__tmp)		\
			: "r" (__new), "r"(__ptr), "r"(__old)	\
			:);					\
		break;						\
	default:						\
		__bad_xchg();					\
	}							\
	__ret;							\
})

#define arch_cmpxchg_relaxed(ptr, o, n) \
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))

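/*
 * __cmpxchg_acquire() - as __cmpxchg_relaxed(), but an ACQUIRE_FENCE
 * (from <asm/barrier.h>) is placed after the store-exclusive and before
 * the "2:" label, so acquire ordering is provided on a successful
 * exchange while a failed compare skips the fence.
 */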
#define __cmpxchg_acquire(ptr, old, new, size)			\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(new) __tmp;					\
	__typeof__(old) __old = (old);				\
	__typeof__(*(ptr)) __ret;				\
	switch (size) {						\
	case 4:							\
		asm volatile (					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	cmpne		%0, %4   \n"		\
		"	bt		2f       \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
		ACQUIRE_FENCE					\
		"2:				 \n"		\
			: "=&r" (__ret), "=&r" (__tmp)		\
			: "r" (__new), "r"(__ptr), "r"(__old)	\
			:);					\
		break;						\
	default:						\
		__bad_xchg();					\
	}							\
	__ret;							\
})

#define arch_cmpxchg_acquire(ptr, o, n) \
	(__cmpxchg_acquire((ptr), (o), (n), sizeof(*(ptr))))

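/*
 * __cmpxchg() - fully ordered compare-and-exchange: a RELEASE_FENCE is
 * executed before the LL/SC sequence and a FULL_FENCE after a successful
 * store-exclusive, so a successful arch_cmpxchg() is fully ordered.
 */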
#define __cmpxchg(ptr, old, new, size)				\
({								\
	__typeof__(ptr) __ptr = (ptr);				\
	__typeof__(new) __new = (new);				\
	__typeof__(new) __tmp;					\
	__typeof__(old) __old = (old);				\
	__typeof__(*(ptr)) __ret;				\
	switch (size) {						\
	case 4:							\
		asm volatile (					\
		RELEASE_FENCE					\
		"1:	ldex.w		%0, (%3) \n"		\
		"	cmpne		%0, %4   \n"		\
		"	bt		2f       \n"		\
		"	mov		%1, %2   \n"		\
		"	stex.w		%1, (%3) \n"		\
		"	bez		%1, 1b   \n"		\
		FULL_FENCE					\
		"2:				 \n"		\
			: "=&r" (__ret), "=&r" (__tmp)		\
			: "r" (__new), "r"(__ptr), "r"(__old)	\
			:);					\
		break;						\
	default:						\
		__bad_xchg();					\
	}							\
	__ret;							\
})

#define arch_cmpxchg(ptr, o, n)					\
	(__cmpxchg((ptr), (o), (n), sizeof(*(ptr))))

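/*
 * arch_cmpxchg_local() only needs to be atomic with respect to the
 * current CPU, so the relaxed implementation is sufficient.
 */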
#define arch_cmpxchg_local(ptr, o, n)				\
	(__cmpxchg_relaxed((ptr), (o), (n), sizeof(*(ptr))))
#else
#include <asm-generic/cmpxchg.h>
#endif

#endif /* __ASM_CSKY_CMPXCHG_H */