1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Copyright (C) 2020-2022 Loongson Technology Corporation Limited
4  */
5 #ifndef __ASM_CMPXCHG_H
6 #define __ASM_CMPXCHG_H
7 
8 #include <asm/barrier.h>
9 #include <linux/build_bug.h>
10 
/*
 * __xchg_asm() - atomically exchange *@m with @val.
 *
 * @amswap_db: string literal naming the AMO instruction to use
 *             ("amswap_db.w" or "amswap_db.d"); the _db variants
 *             include barrier semantics on LoongArch.
 * @m:         pointer to the (32- or 64-bit) memory word.
 * @val:       new value to store.
 *
 * Expands to the previous value of *@m.
 *
 * Operand map: %0 = memory ("+ZB": base-register-only addressing,
 * read/write), %1 = old value out, %z2 = @val ("Jr": prints $zero
 * when @val is constant 0, a register otherwise).
 */
#define __xchg_asm(amswap_db, m, val)		\
({						\
		__typeof(val) __ret;		\
						\
		__asm__ __volatile__ (		\
		" "amswap_db" %1, %z2, %0 \n"	\
		: "+ZB" (*m), "=&r" (__ret)	\
		: "Jr" (val)			\
		: "memory");			\
						\
		__ret;				\
})
23 
24 static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
25 				   int size)
26 {
27 	switch (size) {
28 	case 4:
29 		return __xchg_asm("amswap_db.w", (volatile u32 *)ptr, (u32)x);
30 
31 	case 8:
32 		return __xchg_asm("amswap_db.d", (volatile u64 *)ptr, (u64)x);
33 
34 	default:
35 		BUILD_BUG();
36 	}
37 
38 	return 0;
39 }
40 
/*
 * arch_xchg() - type-preserving wrapper around __xchg().
 *
 * Widens @x to unsigned long for the helper, then narrows the old
 * value back to the pointee type of @ptr.
 */
#define arch_xchg(ptr, x)						\
	((__typeof__(*(ptr)))						\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr))))
50 
/*
 * __cmpxchg_asm() - LL/SC compare-and-exchange on *@m.
 *
 * @ld/@st: string literals naming the load-linked / store-conditional
 *          pair ("ll.w"/"sc.w" or "ll.d"/"sc.d").
 * @m:      pointer to the memory word.
 * @old:    expected value.
 * @new:    replacement value.
 *
 * Expands to the value observed at *@m; equal to @old iff the store
 * took place. The loop retries when sc fails ($t0 written as 0);
 * $t0 is clobbered. The "Jr"/%z operands print $zero for constant 0.
 * NOTE(review): __WEAK_LLSC_MB after label 2 appears to order the
 * mismatch (no-store) exit path, which LL/SC alone does not order on
 * LoongArch — confirm against the barrier definitions in asm/barrier.h.
 */
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(old) __ret;						\
									\
	__asm__ __volatile__(						\
	"1:	" ld "	%0, %2		# __cmpxchg_asm \n"		\
	"	bne	%0, %z3, 2f			\n"		\
	"	or	$t0, %z4, $zero			\n"		\
	"	" st "	$t0, %1				\n"		\
	"	beq	$zero, $t0, 1b			\n"		\
	"2:						\n"		\
	__WEAK_LLSC_MB							\
	: "=&r" (__ret), "=ZB"(*m)					\
	: "ZB"(*m), "Jr" (old), "Jr" (new)				\
	: "t0", "memory");						\
									\
	__ret;								\
})
69 
70 static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
71 				      unsigned long new, unsigned int size)
72 {
73 	switch (size) {
74 	case 4:
75 		return __cmpxchg_asm("ll.w", "sc.w", (volatile u32 *)ptr,
76 				     (u32)old, new);
77 
78 	case 8:
79 		return __cmpxchg_asm("ll.d", "sc.d", (volatile u64 *)ptr,
80 				     (u64)old, new);
81 
82 	default:
83 		BUILD_BUG();
84 	}
85 
86 	return 0;
87 }
88 
/*
 * arch_cmpxchg_local() - type-preserving wrapper around __cmpxchg().
 *
 * Converts @old and @new through the pointee type of @ptr before
 * widening to unsigned long, and narrows the observed value back to
 * the pointee type on return.
 */
#define arch_cmpxchg_local(ptr, old, new)				\
({									\
	__typeof__(*(ptr)) __cl_old = (old);				\
	__typeof__(*(ptr)) __cl_new = (new);				\
									\
	(__typeof__(*(ptr)))__cmpxchg((ptr),				\
				      (unsigned long)__cl_old,		\
				      (unsigned long)__cl_new,		\
				      sizeof(*(ptr)));			\
})
95 
/*
 * arch_cmpxchg() - fully-ordered compare-and-exchange.
 *
 * Forwards directly to arch_cmpxchg_local(); the underlying asm
 * sequence is the same for both variants here.
 */
#define arch_cmpxchg(ptr, old, new)					\
	arch_cmpxchg_local((ptr), (old), (new))
104 
#ifdef CONFIG_64BIT
/*
 * 64-bit kernels: the generic 8-byte paths above handle cmpxchg64
 * directly; the BUILD_BUG_ON guards against misuse on a narrower type.
 */
#define arch_cmpxchg64_local(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
  })

#define arch_cmpxchg64(ptr, o, n)					\
  ({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
  })
#else
/*
 * 32-bit kernels: no native 8-byte cmpxchg; fall back to the generic
 * (interrupt-disabling, non-SMP-atomic) implementation.
 */
#include <asm-generic/cmpxchg-local.h>
#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))
#define arch_cmpxchg64(ptr, o, n) arch_cmpxchg64_local((ptr), (o), (n))
#endif
122 
123 #endif /* __ASM_CMPXCHG_H */
124